mirror of
https://github.com/SickGear/SickGear.git
synced 2024-12-01 00:43:37 +00:00
Re-write of Indexer API wrapper
Re-write of New Show seearch function Re-write of Existing show search helper function for determining indexer/indexerid Massive code cleanup and more bugs found and fixed Indexer code fully modualized for future proofing
This commit is contained in:
parent
108df09382
commit
95d7d728e0
128 changed files with 5314 additions and 4198 deletions
|
@ -75,7 +75,7 @@ def loadShowsFromDB():
|
|||
|
||||
for sqlShow in sqlResults:
|
||||
try:
|
||||
curShow = TVShow(sqlShow["indexer"], int(sqlShow["indexer_id"]))
|
||||
curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"]))
|
||||
sickbeard.showList.append(curShow)
|
||||
except Exception, e:
|
||||
logger.log(u"There was an error creating the show in " + sqlShow["location"] + ": " + str(e).decode('utf-8'), logger.ERROR)
|
||||
|
|
|
@ -167,11 +167,7 @@
|
|||
<a href="http://www.imdb.com/title/${cur_result["imdb_id"]}" onclick="window.open(this.href, '_blank'); return false;" title="http://www.imdb.com/title/${cur_result["imdb_id"]}"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" />
|
||||
#end if
|
||||
</td>
|
||||
#if 'Tvdb' in $cur_result["indexer"]:
|
||||
<td align="center"><a href="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" /></a></td>
|
||||
#else
|
||||
<td align="center"><a href="http://tvrage.com/shows/id-$[cur_result["showid"]]" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://tvrage.com/shows/id-$[cur_result["showid"]]"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" /></a></td>
|
||||
#end if
|
||||
<td align="center"><a href="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}"><img alt="["'+$sickbeard.indexerApi($cur_result["indexer"]).name+'"]" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($cur_result["indexer"]).config["icon"]" /></a></td>
|
||||
<td align="center">
|
||||
<a href="$sbRoot/home/searchEpisode?show=${cur_result["showid"]}&season=$cur_result["season"]&episode=$cur_result["episode"]" title="Manual Search" id="forceUpdate-${cur_result["showid"]}" class="forceUpdate epSearch"><img alt="[search]" height="16" width="16" src="$sbRoot/images/search32.png" id="forceUpdateImage-${cur_result["showid"]}" /></a>
|
||||
</td>
|
||||
|
@ -307,11 +303,7 @@
|
|||
#if $cur_result["imdb_id"]:
|
||||
<a href="http://www.imdb.com/title/${cur_result["imdb_id"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/${cur_result["imdb_id"]}"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" />
|
||||
#end if
|
||||
#if "Tvdb" in $cur_result["indexer"]:
|
||||
<a href="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" /></a>
|
||||
#else
|
||||
<a href="http://tvrage.com/shows/id-$[cur_result["showid"]]" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://tvrage.com/shows/id-$[cur_result["showid"]]"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" /></a>
|
||||
#end if
|
||||
<a href="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}"><img alt="$sickbeard.indexerApi($cur_result["indexer"]).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($cur_result["indexer"]).config["icon"]" /></a>
|
||||
<span><a href="$sbRoot/home/searchEpisode?show=${cur_result["showid"]}&season=$cur_result["season"]&episode=$cur_result["episode"]" title="Manual Search" id="forceUpdate-${cur_result["showid"]}" class="epSearch forceUpdate"><img alt="[search]" height="16" width="16" src="$sbRoot/images/search32.png" id="forceUpdateImage-${cur_result["showid"]}" /></a></span>
|
||||
</span>
|
||||
</div>
|
||||
|
|
|
@ -50,23 +50,14 @@
|
|||
- $show.genre[1:-1].replace('|',' | ')
|
||||
#end if
|
||||
<span class="tvshowLink" style="vertical-align: text-top">
|
||||
#if "Tvdb" in $show.indexer
|
||||
<a href="http://www.thetvdb.com/?tab=series&id=$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="http://www.thetvdb.com/?tab=series&id=$show.indexerid"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" style="margin-top: -1px;"/></a>
|
||||
#else
|
||||
<a href="http://www.tvrage.com/shows/id-$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="http://www.tvrage.com/shows/id-$show.indexerid"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" style="margin-top: -1px;"/></a>
|
||||
#end if
|
||||
<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
|
||||
</span>
|
||||
#else
|
||||
<img src="$sbRoot/images/flags/${$show.imdb_info['country_codes']}.png" width="16" height="11" style="margin-top: 3px; margin-left: 3px" /> ($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ')
|
||||
<span class="tvshowLink" style="vertical-align: text-top">
|
||||
<a href="http://www.imdb.com/title/$show.imdbid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/$show.imdbid"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" style="margin-top: -1px;"/>
|
||||
#if "Tvdb" in $show.indexer
|
||||
<a href="http://www.thetvdb.com/?tab=series&id=$show.indexerid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.thetvdb.com/?tab=series&id=$show.indexerid"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" style="margin-top: -1px;"/></a>
|
||||
#else
|
||||
<a href="http://www.tvrage.com/shows/id-$show.indexerid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.tvrage.com/shows/id-$show.indexerid"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" style="margin-top: -1px;"/></a>
|
||||
#end if
|
||||
<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
|
||||
</span>
|
||||
|
||||
#end if
|
||||
</span>
|
||||
#if $seasonResults:
|
||||
|
|
|
@ -112,8 +112,8 @@ This <b>DOES NOT</b> allow Sick Beard to download non-english TV episodes!<br />
|
|||
<input type="checkbox" name="dvdorder" #if $show.dvdorder == 1 then "checked=\"checked\"" else ""# /><br/>
|
||||
(check this if you wish to use the DVD order instead of the Airing order)
|
||||
<br/><br/>
|
||||
<b>Archive on first match: </b>
|
||||
#if $bestQualities
|
||||
<b>Archive on first match: </b>
|
||||
<input type="checkbox" name="archive_firstmatch" #if $show.archive_firstmatch == 1 then "checked=\"checked\"" else ""# /><br />
|
||||
(check this to have the episode archived after the first best match is found from your archive quality list)
|
||||
<br />
|
||||
|
|
|
@ -39,4 +39,4 @@
|
|||
|
||||
</div>
|
||||
|
||||
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
||||
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
|
@ -1,7 +1,5 @@
|
|||
#import sickbeard
|
||||
#from sickbeard.common import indexerStrings
|
||||
|
||||
#set $rowidx = 0
|
||||
<table id="addRootDirTable" class="sickbeardTable tablesorter">
|
||||
<thead><tr><th width="1%"><input type="checkbox" id="checkAll" checked=checked></th><th>Directory</th><th width="20%">Show Name (tvshow.nfo)<th width="20%">Indexer</td></tr></thead>
|
||||
<tfoot>
|
||||
|
@ -16,27 +14,23 @@
|
|||
#end if
|
||||
|
||||
#set $show_id = $curDir['dir']
|
||||
#set $indexer = 'Tvdb'
|
||||
#set $indexer = 1
|
||||
#if $curDir['existing_info'][0]:
|
||||
#set $show_id = $show_id + '|' + $str($curDir['existing_info'][0]) + '|' + str($curDir['existing_info'][1])
|
||||
#set $indexer = $str($curDir['existing_info'][2])
|
||||
#set $show_id = $show_id + '|' + $str($curDir['existing_info'][0]) + '|' + $str($curDir['existing_info'][1])
|
||||
#set $indexer = $curDir['existing_info'][2]
|
||||
#end if
|
||||
|
||||
#set $rowidx = $rowidx + 1
|
||||
|
||||
<tr id=$rowidx>
|
||||
<tr>
|
||||
<td><input type="checkbox" id="$show_id" class="dirCheck" checked=checked></td>
|
||||
<td><label for="$show_id">$curDir['display_dir']</label></td>
|
||||
#if 'Tvdb' in $indexer
|
||||
<td>#if $curDir['existing_info'][0] and $curDir['existing_info'][1] then '<a href="http://thetvdb.com/?tab=series&id='+$str($curDir['existing_info'][0])+'">'+$curDir['existing_info'][1]+'</a>' else "?"#</td>
|
||||
#elif 'TVRage' in $indexer
|
||||
<td>#if $curDir['existing_info'][1] then '<a href="http://tvrage.com/shows/id-'+$str($curDir['existing_info'][0])+'">'+$curDir['existing_info'][1]+'</a>' else "?"#</td>
|
||||
#else
|
||||
<td>$curDir['existing_info'][1]</td>
|
||||
#if $curDir['existing_info'][1]:
|
||||
<td><a href="$sickbeard.indexerApi($indexer).config["show_url"]$curDir['existing_info'][0]">$curDir['existing_info'][1]</a></td>
|
||||
#else:
|
||||
<td>?</td>
|
||||
#end if
|
||||
<td>
|
||||
<select name="indexer">
|
||||
#for $curIndexer in sorted($indexerStrings.items(), key=lambda x: x[1]):
|
||||
#for $curIndexer in $sickbeard.indexerApi().indexers.items():
|
||||
<option value="$curIndexer[0]" #if $curIndexer[0] == $indexer then "selected=\"selected\"" else ""#>$curIndexer[1]</option>
|
||||
#end for
|
||||
</select>
|
||||
|
@ -45,4 +39,4 @@
|
|||
#end for
|
||||
</tbody>
|
||||
</tbody>
|
||||
</table>
|
||||
</table>
|
|
@ -27,15 +27,11 @@
|
|||
<form id="addShowForm" method="post" action="$sbRoot/home/addShows/addNewShow" accept-charset="utf-8">
|
||||
|
||||
<fieldset class="sectionwrap">
|
||||
<legend class="legendStep">Find a show on the TVDB and TVRAGE</legend>
|
||||
<legend class="legendStep">Find a show on the TVDB or TVRAGE</legend>
|
||||
|
||||
<div class="stepDiv">
|
||||
#if $use_provided_info:
|
||||
#if 'Tvdb' in $provided_indexer
|
||||
Show retrieved from existing metadata: <a href="http://thetvdb.com/?tab=series&id=$provided_indexer_id">$provided_indexer_name</a>
|
||||
#else
|
||||
Show retrieved from existing metadata: <a href="http://tvrage.com/shows/id-$provided_indexer_id">$provided_indexer_name</a>
|
||||
#end if
|
||||
Show retrieved from existing metadata: <a href="$sickbeard.indexerApi($provided_indexer).config["show_url"]$provided_indexer_id">$provided_indexer_name</a>
|
||||
<input type="hidden" name="indexerLang" value="en" />
|
||||
<input type="hidden" name="whichSeries" value="$provided_indexer_id" />
|
||||
<input type="hidden" id="providedName" value="$provided_indexer_name" />
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
#import sickbeard
|
||||
#from sickbeard.common import indexerStrings
|
||||
#set global $header="Post Processing"
|
||||
#set global $title="Post Processing"
|
||||
|
||||
|
@ -8,18 +7,18 @@
|
|||
#set global $topmenu="home"#
|
||||
#import os.path
|
||||
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl")
|
||||
#if $varExists('header')
|
||||
#if $varExists('header')
|
||||
<h1 class="header">$header</h1>
|
||||
#else
|
||||
#else
|
||||
<h1 class="title">$title</h1>
|
||||
#end if
|
||||
<form name="processForm" method="post" action="processEpisode" style="line-height: 44px">
|
||||
<input type="hidden" id="type" name="type" value="manual">
|
||||
<input type="hidden" id="type" name="type" value="manual">
|
||||
<b>Enter the folder containing the episode:</b> <input type="text" name="dir" id="episodeDir" size="50" /><br/>
|
||||
<b>Show Indexer to be used:</b>
|
||||
<select name="indexer" id="indexer" class="indexer">
|
||||
#for $curIndexer in sorted($indexerStrings.items(), key=lambda x: x[1]):
|
||||
<option value="$curIndexer[0]" #if $curIndexer[0] in $indexerStrings then "selected=\"selected\"" else ""#>$curIndexer[1]</option>
|
||||
#for $curIndexer in sorted($sickbeard.indexerApi().indexers.items(), key=lambda x: x[1]):
|
||||
<option value="$curIndexer[0]" #if $curIndexer[0] in $sickbeard.indexerApi().indexers then "selected=\"selected\"" else ""#>$curIndexer[1]</option>
|
||||
#end for
|
||||
</select>
|
||||
<br/>
|
||||
|
@ -39,7 +38,6 @@
|
|||
<b>Mark Dir/Files as priority download:</b> <input id="is_priority" name="is_priority" type="checkbox">
|
||||
<span style="line-height: 0; font-size: 12px;"><i>(Check it to replace the file even if it exists at higher quality)</i></span><br/>
|
||||
#if $sickbeard.USE_FAILED_DOWNLOADS:
|
||||
|
||||
<b>Mark download as failed:</b>  
|
||||
<input id="failed" name="failed" type="checkbox"><br />
|
||||
#end if
|
||||
|
@ -47,11 +45,10 @@
|
|||
<input id="submit" class="btn btn-primary" type="submit" value="Process" />
|
||||
</form>
|
||||
|
||||
|
||||
<script type="text/javascript" charset="utf-8">
|
||||
<!--
|
||||
jQuery('#episodeDir').fileBrowser({ title: 'Select Unprocessed Episode Folder', key: 'postprocessPath' });
|
||||
//-->
|
||||
</script>
|
||||
|
||||
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
||||
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
|
@ -115,7 +115,7 @@ $(document).ready(function(){
|
|||
$("#checkboxControls input").change(function(e){
|
||||
var whichClass = $(this).attr('id')
|
||||
$(this).showHideRows(whichClass)
|
||||
return
|
||||
|
||||
$('tr.'+whichClass).each(function(i){
|
||||
$(this).toggle();
|
||||
});
|
||||
|
|
|
@ -54,28 +54,28 @@ $(document).ready(function () {
|
|||
|
||||
var whichSeries = obj.join('|');
|
||||
|
||||
|
||||
resultStr += '<input type="radio" id="whichSeries" name="whichSeries" value="' + whichSeries + '"' + checked + ' /> ';
|
||||
if (obj[0] == 'Tvdb' && data.langid && data.langid != "") {
|
||||
resultStr += '<a href="http://thetvdb.com/?tab=series&id=' + obj[1] + '&lid=' + data.langid + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[2] + '</b></a>';
|
||||
} else if (obj[0] == 'Tvdb') {
|
||||
resultStr += '<a href="http://thetvdb.com/?tab=series&id=' + obj[1] + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[2] + '</b></a>';
|
||||
if (data.langid && data.langid != "") {
|
||||
resultStr += '<a href="'+ obj[2] + obj[3] + '&lid=' + data.langid + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[4] + '</b></a>';
|
||||
} else {
|
||||
resultStr += '<a href="http://tvrage.com/shows/id-' + obj[1] + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[2] + '</b></a>';
|
||||
resultStr += '<a href="'+ obj[2] + obj[3] + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[4] + '</b></a>';
|
||||
}
|
||||
|
||||
if (obj[3] !== null) {
|
||||
var startDate = new Date(obj[3]);
|
||||
if (obj[5] !== null) {
|
||||
var startDate = new Date(obj[5]);
|
||||
var today = new Date();
|
||||
if (startDate > today) {
|
||||
resultStr += ' (will debut on ' + obj[3] + ')';
|
||||
resultStr += ' (will debut on ' + obj[5] + ')';
|
||||
} else {
|
||||
resultStr += ' (started on ' + obj[3] + ')';
|
||||
resultStr += ' (started on ' + obj[5] + ')';
|
||||
}
|
||||
}
|
||||
|
||||
if (obj[0] !== null) {
|
||||
resultStr += ' [' + obj[0] + ']';
|
||||
}
|
||||
|
||||
resultStr += '<br />';
|
||||
});
|
||||
resultStr += '</ul>';
|
||||
|
@ -146,7 +146,7 @@ $(document).ready(function () {
|
|||
var show_name, sep_char;
|
||||
// if they've picked a radio button then use that
|
||||
if ($('input:radio[name=whichSeries]:checked').length) {
|
||||
show_name = $('input:radio[name=whichSeries]:checked').val().split('|')[2];
|
||||
show_name = $('input:radio[name=whichSeries]:checked').val().split('|')[4];
|
||||
}
|
||||
// if we provided a show in the hidden field, use that
|
||||
else if ($('input:hidden[name=whichSeries]').length && $('input:hidden[name=whichSeries]').val().length) {
|
||||
|
|
|
@ -42,16 +42,19 @@ from lib import requests
|
|||
|
||||
from tvdb_ui import BaseUI, ConsoleUI
|
||||
from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
|
||||
tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
|
||||
tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
|
||||
|
||||
# Cached Session Handler
|
||||
from lib.httpcache import CachingHTTPAdapter
|
||||
|
||||
s = requests.Session()
|
||||
s.mount('http://', CachingHTTPAdapter())
|
||||
|
||||
|
||||
def log():
|
||||
return logging.getLogger("tvdb_api")
|
||||
|
||||
|
||||
class ShowContainer(dict):
|
||||
"""Simple dict that holds a series of Show instances
|
||||
"""
|
||||
|
@ -74,13 +77,14 @@ class ShowContainer(dict):
|
|||
|
||||
_lastgc = time.time()
|
||||
del tbd
|
||||
|
||||
|
||||
super(ShowContainer, self).__setitem__(key, value)
|
||||
|
||||
|
||||
class Show(dict):
|
||||
"""Holds a dict of seasons, and show data.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
dict.__init__(self)
|
||||
self.data = {}
|
||||
|
@ -126,7 +130,7 @@ class Show(dict):
|
|||
raise tvdb_episodenotfound("Could not find any episodes that aired on %s" % date)
|
||||
return ret
|
||||
|
||||
def search(self, term = None, key = None):
|
||||
def search(self, term=None, key=None):
|
||||
"""
|
||||
Search all episodes in show. Can search all data, or a specific key (for
|
||||
example, episodename)
|
||||
|
@ -179,7 +183,7 @@ class Show(dict):
|
|||
"""
|
||||
results = []
|
||||
for cur_season in self.values():
|
||||
searchresult = cur_season.search(term = term, key = key)
|
||||
searchresult = cur_season.search(term=term, key=key)
|
||||
if len(searchresult) != 0:
|
||||
results.extend(searchresult)
|
||||
|
||||
|
@ -187,7 +191,7 @@ class Show(dict):
|
|||
|
||||
|
||||
class Season(dict):
|
||||
def __init__(self, show = None):
|
||||
def __init__(self, show=None):
|
||||
"""The show attribute points to the parent show
|
||||
"""
|
||||
self.show = show
|
||||
|
@ -208,7 +212,7 @@ class Season(dict):
|
|||
else:
|
||||
return dict.__getitem__(self, episode_number)
|
||||
|
||||
def search(self, term = None, key = None):
|
||||
def search(self, term=None, key=None):
|
||||
"""Search all episodes in season, returns a list of matching Episode
|
||||
instances.
|
||||
|
||||
|
@ -221,7 +225,7 @@ class Season(dict):
|
|||
"""
|
||||
results = []
|
||||
for ep in self.values():
|
||||
searchresult = ep.search(term = term, key = key)
|
||||
searchresult = ep.search(term=term, key=key)
|
||||
if searchresult is not None:
|
||||
results.append(
|
||||
searchresult
|
||||
|
@ -230,7 +234,7 @@ class Season(dict):
|
|||
|
||||
|
||||
class Episode(dict):
|
||||
def __init__(self, season = None):
|
||||
def __init__(self, season=None):
|
||||
"""The season attribute points to the parent season
|
||||
"""
|
||||
self.season = season
|
||||
|
@ -255,7 +259,7 @@ class Episode(dict):
|
|||
except KeyError:
|
||||
raise tvdb_attributenotfound("Cannot find attribute %s" % (repr(key)))
|
||||
|
||||
def search(self, term = None, key = None):
|
||||
def search(self, term=None, key=None):
|
||||
"""Search episode data for term, if it matches, return the Episode (self).
|
||||
The key parameter can be used to limit the search to a specific element,
|
||||
for example, episodename.
|
||||
|
@ -286,7 +290,7 @@ class Episode(dict):
|
|||
if key is not None and cur_key != key:
|
||||
# Do not search this key
|
||||
continue
|
||||
if cur_value.find( unicode(term).lower() ) > -1:
|
||||
if cur_value.find(unicode(term).lower()) > -1:
|
||||
return self
|
||||
|
||||
|
||||
|
@ -305,6 +309,7 @@ class Actor(dict):
|
|||
role,
|
||||
sortorder
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return "<Actor \"%s\">" % (self.get("name"))
|
||||
|
||||
|
@ -315,20 +320,21 @@ class Tvdb:
|
|||
>>> t['Scrubs'][1][24]['episodename']
|
||||
u'My Last Day'
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
interactive = False,
|
||||
select_first = False,
|
||||
debug = False,
|
||||
cache = True,
|
||||
banners = False,
|
||||
actors = False,
|
||||
custom_ui = None,
|
||||
language = None,
|
||||
search_all_languages = False,
|
||||
apikey = None,
|
||||
forceConnect=False,
|
||||
useZip=False,
|
||||
dvdorder=False):
|
||||
interactive=False,
|
||||
select_first=False,
|
||||
debug=False,
|
||||
cache=True,
|
||||
banners=False,
|
||||
actors=False,
|
||||
custom_ui=None,
|
||||
language=None,
|
||||
search_all_languages=False,
|
||||
apikey=None,
|
||||
forceConnect=False,
|
||||
useZip=False,
|
||||
dvdorder=False):
|
||||
|
||||
"""interactive (True/False):
|
||||
When True, uses built-in console UI is used to select the correct show.
|
||||
|
@ -402,21 +408,21 @@ class Tvdb:
|
|||
And only the main language xml is used, the actor and banner xml are lost.
|
||||
"""
|
||||
|
||||
self.shows = ShowContainer() # Holds all Show classes
|
||||
self.corrections = {} # Holds show-name to show_id mapping
|
||||
self.shows = ShowContainer() # Holds all Show classes
|
||||
self.corrections = {} # Holds show-name to show_id mapping
|
||||
|
||||
self.config = {}
|
||||
|
||||
if apikey is not None:
|
||||
self.config['apikey'] = apikey
|
||||
else:
|
||||
self.config['apikey'] = "0629B785CE550C8D" # tvdb_api's API key
|
||||
self.config['apikey'] = "0629B785CE550C8D" # tvdb_api's API key
|
||||
|
||||
self.config['debug_enabled'] = debug # show debugging messages
|
||||
self.config['debug_enabled'] = debug # show debugging messages
|
||||
|
||||
self.config['custom_ui'] = custom_ui
|
||||
|
||||
self.config['interactive'] = interactive # prompt for correct series?
|
||||
self.config['interactive'] = interactive # prompt for correct series?
|
||||
|
||||
self.config['select_first'] = select_first
|
||||
|
||||
|
@ -445,8 +451,8 @@ class Tvdb:
|
|||
|
||||
if self.config['debug_enabled']:
|
||||
warnings.warn("The debug argument to tvdb_api.__init__ will be removed in the next version. "
|
||||
"To enable debug messages, use the following code before importing: "
|
||||
"import logging; logging.basicConfig(level=logging.DEBUG)")
|
||||
"To enable debug messages, use the following code before importing: "
|
||||
"import logging; logging.basicConfig(level=logging.DEBUG)")
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
|
||||
|
@ -454,8 +460,8 @@ class Tvdb:
|
|||
# Hard-coded here as it is realtively static, and saves another HTTP request, as
|
||||
# recommended on http://thetvdb.com/wiki/index.php/API:languages.xml
|
||||
self.config['valid_languages'] = [
|
||||
"da", "fi", "nl", "de", "it", "es", "fr","pl", "hu","el","tr",
|
||||
"ru","he","ja","pt","zh","cs","sl", "hr","ko","en","sv","no"
|
||||
"da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr",
|
||||
"ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"
|
||||
]
|
||||
|
||||
# thetvdb.com should be based around numeric language codes,
|
||||
|
@ -463,9 +469,9 @@ class Tvdb:
|
|||
# requires the language ID, thus this mapping is required (mainly
|
||||
# for usage in tvdb_ui - internally tvdb_api will use the language abbreviations)
|
||||
self.config['langabbv_to_id'] = {'el': 20, 'en': 7, 'zh': 27,
|
||||
'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
|
||||
'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
|
||||
'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30}
|
||||
'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
|
||||
'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
|
||||
'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30}
|
||||
|
||||
if language is None:
|
||||
self.config['language'] = 'en'
|
||||
|
@ -591,9 +597,9 @@ class Tvdb:
|
|||
if sid not in self.shows:
|
||||
self.shows[sid] = Show()
|
||||
if seas not in self.shows[sid]:
|
||||
self.shows[sid][seas] = Season(show = self.shows[sid])
|
||||
self.shows[sid][seas] = Season(show=self.shows[sid])
|
||||
if ep not in self.shows[sid][seas]:
|
||||
self.shows[sid][seas][ep] = Episode(season = self.shows[sid][seas])
|
||||
self.shows[sid][seas][ep] = Episode(season=self.shows[sid][seas])
|
||||
self.shows[sid][seas][ep][attrib] = value
|
||||
|
||||
def _setShowData(self, sid, key, value):
|
||||
|
@ -610,28 +616,21 @@ class Tvdb:
|
|||
- Replaces & with &
|
||||
- Trailing whitespace
|
||||
"""
|
||||
data = data.replace(u"&", u"&")
|
||||
data = data.strip()
|
||||
if isinstance(data, str):
|
||||
data = data.replace(u"&", u"&")
|
||||
data = data.strip()
|
||||
return data
|
||||
|
||||
def search(self, series):
|
||||
"""This searches TheTVDB.com for the series name
|
||||
and returns the result list
|
||||
"""
|
||||
series = urllib.quote(series.encode("utf-8"))
|
||||
series = series.encode("utf-8")
|
||||
log().debug("Searching for show %s" % series)
|
||||
self.config['params_getSeries']['seriesname'] = series
|
||||
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
||||
allSeries = []
|
||||
for series in seriesEt:
|
||||
result = dict((k.tag.lower(), k.text) for k in series.getchildren())
|
||||
result['id'] = int(result['id'])
|
||||
result['lid'] = self.config['langabbv_to_id'][result['language']]
|
||||
if 'aliasnames' in result:
|
||||
result['aliasnames'] = result['aliasnames'].split("|")
|
||||
log().debug('Found series %(seriesname)s' % result)
|
||||
allSeries.append(result)
|
||||
|
||||
allSeries = [dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt]
|
||||
|
||||
return allSeries
|
||||
|
||||
def _getSeries(self, series):
|
||||
|
@ -648,14 +647,14 @@ class Tvdb:
|
|||
|
||||
if self.config['custom_ui'] is not None:
|
||||
log().debug("Using custom UI %s" % (repr(self.config['custom_ui'])))
|
||||
ui = self.config['custom_ui'](config = self.config)
|
||||
ui = self.config['custom_ui'](config=self.config)
|
||||
else:
|
||||
if not self.config['interactive']:
|
||||
log().debug('Auto-selecting first search result using BaseUI')
|
||||
ui = BaseUI(config = self.config)
|
||||
ui = BaseUI(config=self.config)
|
||||
else:
|
||||
log().debug('Interactively selecting show using ConsoleUI')
|
||||
ui = ConsoleUI(config = self.config)
|
||||
ui = ConsoleUI(config=self.config)
|
||||
|
||||
return ui.selectSeries(allSeries)
|
||||
|
||||
|
@ -678,7 +677,7 @@ class Tvdb:
|
|||
This interface will be improved in future versions.
|
||||
"""
|
||||
log().debug('Getting season banners for %s' % (sid))
|
||||
bannersEt = self._getetsrc( self.config['url_seriesBanner'] % (sid) )
|
||||
bannersEt = self._getetsrc(self.config['url_seriesBanner'] % (sid))
|
||||
banners = {}
|
||||
for cur_banner in bannersEt.findall('Banner'):
|
||||
bid = cur_banner.find('id').text
|
||||
|
@ -753,7 +752,7 @@ class Tvdb:
|
|||
cur_actors.append(curActor)
|
||||
self._setShowData(sid, '_actors', cur_actors)
|
||||
|
||||
def _getShowData(self, sid, language):
|
||||
def _getShowData(self, sid, language, seriesSearch=False):
|
||||
"""Takes a series ID, gets the epInfo URL and parses the TVDB
|
||||
XML file into the shows dict in layout:
|
||||
shows[series_id][season_number][episode_number]
|
||||
|
@ -778,17 +777,27 @@ class Tvdb:
|
|||
seriesInfoEt = self._getetsrc(
|
||||
self.config['url_seriesInfo'] % (sid, getShowInLanguage)
|
||||
)
|
||||
|
||||
if seriesInfoEt is None: return False
|
||||
for curInfo in seriesInfoEt.findall("Series")[0]:
|
||||
tag = curInfo.tag.lower()
|
||||
value = curInfo.text
|
||||
|
||||
if tag == 'seriesname' and value is None:
|
||||
return False
|
||||
|
||||
if value is not None:
|
||||
if tag == 'id':
|
||||
value = int(value)
|
||||
|
||||
if tag in ['banner', 'fanart', 'poster']:
|
||||
value = self.config['url_artworkPrefix'] % (value)
|
||||
else:
|
||||
value = self._cleanData(value)
|
||||
|
||||
self._setShowData(sid, tag, value)
|
||||
if seriesSearch:
|
||||
return True
|
||||
|
||||
# Parse banners
|
||||
if self.config['banners_enabled']:
|
||||
|
@ -806,7 +815,7 @@ class Tvdb:
|
|||
else:
|
||||
url = self.config['url_epInfo'] % (sid, language)
|
||||
|
||||
epsEt = self._getetsrc( url, language=language)
|
||||
epsEt = self._getetsrc(url, language=language)
|
||||
|
||||
for cur_ep in epsEt.findall("Episode"):
|
||||
|
||||
|
@ -818,7 +827,7 @@ class Tvdb:
|
|||
|
||||
if use_dvd:
|
||||
seas_no = int(cur_ep.find('DVD_season').text)
|
||||
ep_no = int(float(cur_ep.find('DVD_episodenumber').text))
|
||||
ep_no = int(float(cur_ep.find('DVD_episodenumber').text))
|
||||
else:
|
||||
seas_no = int(cur_ep.find('SeasonNumber').text)
|
||||
ep_no = int(cur_ep.find('EpisodeNumber').text)
|
||||
|
@ -834,7 +843,7 @@ class Tvdb:
|
|||
if (useDVD):
|
||||
log().debug('Use DVD Order? Yes')
|
||||
seas_no = int(cur_ep.find('DVD_season').text)
|
||||
ep_no = int(float(cur_ep.find('DVD_episodenumber').text))
|
||||
ep_no = int(float(cur_ep.find('DVD_episodenumber').text))
|
||||
else:
|
||||
log().debug('Use DVD Order? No')
|
||||
seas_no = int(cur_ep.find('SeasonNumber').text)
|
||||
|
@ -844,28 +853,34 @@ class Tvdb:
|
|||
tag = cur_item.tag.lower()
|
||||
value = cur_item.text
|
||||
if value is not None:
|
||||
if tag == 'id':
|
||||
value = int(value)
|
||||
|
||||
if tag == 'filename':
|
||||
value = self.config['url_artworkPrefix'] % (value)
|
||||
else:
|
||||
value = self._cleanData(value)
|
||||
self._setItem(sid, seas_no, ep_no, tag, value)
|
||||
|
||||
return True
|
||||
|
||||
def _nameToSid(self, name):
|
||||
"""Takes show name, returns the correct series ID (if the show has
|
||||
already been grabbed), or grabs all episodes and returns
|
||||
the correct SID.
|
||||
"""
|
||||
sid = set()
|
||||
if name in self.corrections:
|
||||
log().debug('Correcting %s to %s' % (name, self.corrections[name]) )
|
||||
log().debug('Correcting %s to %s' % (name, self.corrections[name]))
|
||||
sid = self.corrections[name]
|
||||
else:
|
||||
log().debug('Getting show %s' % (name))
|
||||
selected_series = self._getSeries( name )
|
||||
sname, sid = selected_series['seriesname'], selected_series['id']
|
||||
log().debug('Got %(seriesname)s, id %(id)s' % selected_series)
|
||||
|
||||
self.corrections[name] = sid
|
||||
self._getShowData(selected_series['id'], selected_series['language'])
|
||||
selected_series = self._getSeries(name)
|
||||
if isinstance(selected_series, dict):
|
||||
selected_series = [selected_series]
|
||||
[sid.add(int(x['id'])) for x in selected_series if
|
||||
self._getShowData(int(x['id']), self.config['language'], seriesSearch=True)]
|
||||
[self.corrections.update({x['seriesname']: int(x['id'])}) for x in selected_series]
|
||||
|
||||
return sid
|
||||
|
||||
|
@ -878,11 +893,10 @@ class Tvdb:
|
|||
if key not in self.shows:
|
||||
self._getShowData(key, self.config['language'])
|
||||
return self.shows[key]
|
||||
|
||||
key = key.lower() # make key lower case
|
||||
sid = self._nameToSid(key)
|
||||
log().debug('Got series id %s' % (sid))
|
||||
return self.shows[sid]
|
||||
|
||||
key = key.lower() # make key lower case
|
||||
sids = self._nameToSid(key)
|
||||
return [self.shows[sid] for sid in sids]
|
||||
|
||||
def __repr__(self):
|
||||
return str(self.shows)
|
||||
|
@ -893,11 +907,13 @@ def main():
|
|||
grabs an episode name interactively.
|
||||
"""
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
tvdb_instance = Tvdb(interactive=True, cache=False)
|
||||
print tvdb_instance['Lost']['seriesname']
|
||||
print tvdb_instance['Lost'][1][4]['episodename']
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
|
@ -29,6 +29,7 @@ try:
|
|||
except ImportError:
|
||||
import xml.etree.ElementTree as ElementTree
|
||||
|
||||
from collections import defaultdict
|
||||
from lib.dateutil.parser import parse
|
||||
from lib import requests
|
||||
|
||||
|
@ -318,8 +319,8 @@ class TVRage:
|
|||
|
||||
self.config['base_url'] = "http://services.tvrage.com"
|
||||
|
||||
self.config['url_getSeries'] = u"%(base_url)s/myfeeds/search.php" % self.config
|
||||
self.config['params_getSeries'] = {"key": self.config['apikey'], "show": ""}
|
||||
self.config['url_getSeries'] = u"%(base_url)s/feeds/search.php" % self.config
|
||||
self.config['params_getSeries'] = {"show": ""}
|
||||
|
||||
self.config['url_epInfo'] = u"%(base_url)s/myfeeds/episode_list.php" % self.config
|
||||
self.config['params_epInfo'] = {"key": self.config['apikey'], "sid": ""}
|
||||
|
@ -473,28 +474,21 @@ class TVRage:
|
|||
- Replaces & with &
|
||||
- Trailing whitespace
|
||||
"""
|
||||
data = data.replace(u"&", u"&")
|
||||
data = data.strip()
|
||||
if isinstance(data, str):
|
||||
data = data.replace(u"&", u"&")
|
||||
data = data.strip()
|
||||
return data
|
||||
|
||||
def search(self, series):
|
||||
"""This searches tvrage.com for the series name
|
||||
and returns the result list
|
||||
"""
|
||||
series = urllib.quote(series.encode("utf-8"))
|
||||
series = series.encode("utf-8")
|
||||
log().debug("Searching for show %s" % series)
|
||||
self.config['params_getSeries']['show'] = series
|
||||
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
||||
allSeries = []
|
||||
seriesResult = {}
|
||||
for series in seriesEt:
|
||||
for k in series.getchildren():
|
||||
seriesResult.setdefault(k.tag.lower(), k.text)
|
||||
allSeries = [dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt]
|
||||
|
||||
seriesResult['id'] = int(seriesResult['id'])
|
||||
log().debug('Found series %s' % seriesResult['seriesname'])
|
||||
allSeries.append(seriesResult)
|
||||
|
||||
return allSeries
|
||||
|
||||
def _getSeries(self, series):
|
||||
|
@ -518,7 +512,7 @@ class TVRage:
|
|||
|
||||
return ui.selectSeries(allSeries)
|
||||
|
||||
def _getShowData(self, sid):
|
||||
def _getShowData(self, sid, seriesSearch=False):
|
||||
"""Takes a series ID, gets the epInfo URL and parses the TVRAGE
|
||||
XML file into the shows dict in layout:
|
||||
shows[series_id][season_number][episode_number]
|
||||
|
@ -532,14 +526,22 @@ class TVRage:
|
|||
self.config['params_seriesInfo']
|
||||
)
|
||||
|
||||
if seriesInfoEt is None: return False
|
||||
for curInfo in seriesInfoEt:
|
||||
tag = curInfo.tag.lower()
|
||||
value = curInfo.text
|
||||
|
||||
if tag == 'seriesname' and value is None:
|
||||
return False
|
||||
|
||||
if tag == 'id':
|
||||
value = int(value)
|
||||
|
||||
if value is not None:
|
||||
value = self._cleanData(value)
|
||||
|
||||
self._setShowData(sid, tag, value)
|
||||
if seriesSearch: return True
|
||||
|
||||
try:
|
||||
# Parse genre data
|
||||
|
@ -572,28 +574,32 @@ class TVRage:
|
|||
|
||||
value = cur_item.text
|
||||
if value is not None:
|
||||
if tag == 'id':
|
||||
value = int(value)
|
||||
|
||||
value = self._cleanData(value)
|
||||
|
||||
self._setItem(sid, seas_no, ep_no, tag, value)
|
||||
except:
|
||||
continue
|
||||
return True
|
||||
|
||||
def _nameToSid(self, name):
|
||||
"""Takes show name, returns the correct series ID (if the show has
|
||||
already been grabbed), or grabs all episodes and returns
|
||||
the correct SID.
|
||||
"""
|
||||
sid = set()
|
||||
if name in self.corrections:
|
||||
log().debug('Correcting %s to %s' % (name, self.corrections[name]) )
|
||||
sid = self.corrections[name]
|
||||
else:
|
||||
log().debug('Getting show %s' % (name))
|
||||
selected_series = self._getSeries( name )
|
||||
sname, sid = selected_series['seriesname'], selected_series['id']
|
||||
log().debug('Got %(seriesname)s, id %(id)s' % selected_series)
|
||||
|
||||
self.corrections[name] = sid
|
||||
self._getShowData(selected_series['id'])
|
||||
if isinstance(selected_series, dict):
|
||||
selected_series = [selected_series]
|
||||
[sid.add(int(x['id'])) for x in selected_series if self._getShowData(int(x['id']), seriesSearch=True)]
|
||||
[self.corrections.update({x['seriesname']:int(x['id'])}) for x in selected_series]
|
||||
|
||||
return sid
|
||||
|
||||
|
@ -608,9 +614,8 @@ class TVRage:
|
|||
return self.shows[key]
|
||||
|
||||
key = key.lower() # make key lower case
|
||||
sid = self._nameToSid(key)
|
||||
log().debug('Got series id %s' % (sid))
|
||||
return self.shows[sid]
|
||||
sids = self._nameToSid(key)
|
||||
return [self.shows[sid] for sid in sids]
|
||||
|
||||
def __repr__(self):
|
||||
return str(self.shows)
|
||||
|
|
|
@ -29,15 +29,19 @@ import urllib
|
|||
from threading import Lock
|
||||
|
||||
# apparently py2exe won't build these unless they're imported somewhere
|
||||
from sickbeard import providers, metadata, indexers
|
||||
from indexers import indexer_api, indexer_exceptions
|
||||
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen
|
||||
from sickbeard import providers, metadata
|
||||
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \
|
||||
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen
|
||||
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator
|
||||
from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, subtitles, traktWatchListChecker
|
||||
from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
|
||||
subtitles, traktWatchListChecker
|
||||
from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
|
||||
from sickbeard import logger
|
||||
from sickbeard import naming
|
||||
from sickbeard import scene_numbering
|
||||
from indexers.indexer_api import indexerApi
|
||||
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
|
||||
indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts
|
||||
|
||||
from common import SD, SKIPPED, NAMING_REPEAT
|
||||
|
||||
|
@ -204,8 +208,8 @@ KAT_VERIFIED = False
|
|||
|
||||
PUBLICHD = None
|
||||
|
||||
SCC = False
|
||||
SCC_USERNAME = None
|
||||
SCC = False
|
||||
SCC_USERNAME = None
|
||||
SCC_PASSWORD = None
|
||||
|
||||
HDTORRENTS = False
|
||||
|
@ -255,7 +259,7 @@ SAB_APIKEY = None
|
|||
SAB_CATEGORY = None
|
||||
SAB_HOST = ''
|
||||
|
||||
NZBGET_USERNAME = None
|
||||
NZBGET_USERNAME = None
|
||||
NZBGET_PASSWORD = None
|
||||
NZBGET_CATEGORY = None
|
||||
NZBGET_HOST = None
|
||||
|
@ -410,7 +414,6 @@ DATE_PRESET = None
|
|||
TIME_PRESET = None
|
||||
TIME_PRESET_W_SECONDS = None
|
||||
|
||||
|
||||
USE_SUBTITLES = False
|
||||
SUBTITLES_LANGUAGES = []
|
||||
SUBTITLES_DIR = ''
|
||||
|
@ -434,55 +437,56 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'
|
|||
|
||||
__INITIALIZED__ = False
|
||||
|
||||
|
||||
def get_backlog_cycle_time():
|
||||
cycletime = SEARCH_FREQUENCY*2+7
|
||||
cycletime = SEARCH_FREQUENCY * 2 + 7
|
||||
return max([cycletime, 720])
|
||||
|
||||
def initialize(consoleLogging=True):
|
||||
|
||||
def initialize(consoleLogging=True):
|
||||
with INIT_LOCK:
|
||||
|
||||
global ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
|
||||
USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
|
||||
SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \
|
||||
NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, currentSearchScheduler, backlogSearchScheduler, \
|
||||
TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_RATIO, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, \
|
||||
USE_XBMC, XBMC_NOTIFY_ONSNATCH, XBMC_NOTIFY_ONDOWNLOAD, XBMC_NOTIFY_ONSUBTITLEDOWNLOAD, XBMC_UPDATE_FULL, XBMC_UPDATE_ONLYFIRST, \
|
||||
XBMC_UPDATE_LIBRARY, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, \
|
||||
USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_API, TRAKT_REMOVE_WATCHLIST, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktWatchListCheckerSchedular, \
|
||||
USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, \
|
||||
PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \
|
||||
showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, SORT_ARTICLE, showList, loadingShowList, \
|
||||
NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, TVTORRENTS_OPTIONS, BTN, BTN_API_KEY, BTN_OPTIONS, \
|
||||
THEPIRATEBAY, THEPIRATEBAY_TRUSTED, THEPIRATEBAY_PROXY, THEPIRATEBAY_PROXY_URL, THEPIRATEBAY_BLACKLIST, THEPIRATEBAY_OPTIONS, TORRENTLEECH, TORRENTLEECH_USERNAME, TORRENTLEECH_PASSWORD, TORRENTLEECH_OPTIONS, \
|
||||
IPTORRENTS, IPTORRENTS_USERNAME, IPTORRENTS_PASSWORD, IPTORRENTS_FREELEECH, IPTORRENTS_OPTIONS, KAT, KAT_VERIFIED, KAT_OPTIONS, PUBLICHD, PUBLICHD_OPTIONS, SCC, SCC_USERNAME, SCC_PASSWORD, SCC_OPTIONS, HDTORRENTS, HDTORRENTS_USERNAME, HDTORRENTS_PASSWORD, HDTORRENTS_UID, HDTORRENTS_HASH, HDTORRENTS_OPTIONS, TORRENTDAY, TORRENTDAY_USERNAME, TORRENTDAY_PASSWORD, TORRENTDAY_UID, TORRENTDAY_HASH, TORRENTDAY_FREELEECH, TORRENTDAY_OPTIONS, \
|
||||
HDBITS, HDBITS_USERNAME, HDBITS_PASSKEY, HDBITS_OPTIONS, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, SEARCH_FREQUENCY, DEFAULT_SEARCH_FREQUENCY, BACKLOG_SEARCH_FREQUENCY, \
|
||||
NEXTGEN, NEXTGEN_USERNAME, NEXTGEN_PASSWORD, NEXTGEN_FREELEECH, NEXTGEN_OPTIONS, \
|
||||
QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, \
|
||||
GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, \
|
||||
USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, \
|
||||
USE_PYTIVO, PYTIVO_NOTIFY_ONSNATCH, PYTIVO_NOTIFY_ONDOWNLOAD, PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD, PYTIVO_UPDATE_LIBRARY, PYTIVO_HOST, PYTIVO_SHARE_NAME, PYTIVO_TIVO_NAME, \
|
||||
USE_NMA, NMA_NOTIFY_ONSNATCH, NMA_NOTIFY_ONDOWNLOAD, NMA_NOTIFY_ONSUBTITLEDOWNLOAD, NMA_API, NMA_PRIORITY, \
|
||||
USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \
|
||||
USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \
|
||||
versionCheckScheduler, VERSION_NOTIFY, PROCESS_AUTOMATICALLY, UNPACK, \
|
||||
KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_SEARCH_FREQUENCY, \
|
||||
showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \
|
||||
NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_STRIP_YEAR, \
|
||||
RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
|
||||
WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList,\
|
||||
EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \
|
||||
USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \
|
||||
USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, \
|
||||
USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, USE_SYNOINDEX, \
|
||||
USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \
|
||||
USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
|
||||
USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
|
||||
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \
|
||||
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
|
||||
METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
|
||||
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
|
||||
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY
|
||||
USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
|
||||
SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \
|
||||
NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, currentSearchScheduler, backlogSearchScheduler, \
|
||||
TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_RATIO, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, \
|
||||
USE_XBMC, XBMC_NOTIFY_ONSNATCH, XBMC_NOTIFY_ONDOWNLOAD, XBMC_NOTIFY_ONSUBTITLEDOWNLOAD, XBMC_UPDATE_FULL, XBMC_UPDATE_ONLYFIRST, \
|
||||
XBMC_UPDATE_LIBRARY, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, \
|
||||
USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_API, TRAKT_REMOVE_WATCHLIST, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktWatchListCheckerSchedular, \
|
||||
USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, \
|
||||
PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \
|
||||
showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, SORT_ARTICLE, showList, loadingShowList, \
|
||||
NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, TVTORRENTS_OPTIONS, BTN, BTN_API_KEY, BTN_OPTIONS, \
|
||||
THEPIRATEBAY, THEPIRATEBAY_TRUSTED, THEPIRATEBAY_PROXY, THEPIRATEBAY_PROXY_URL, THEPIRATEBAY_BLACKLIST, THEPIRATEBAY_OPTIONS, TORRENTLEECH, TORRENTLEECH_USERNAME, TORRENTLEECH_PASSWORD, TORRENTLEECH_OPTIONS, \
|
||||
IPTORRENTS, IPTORRENTS_USERNAME, IPTORRENTS_PASSWORD, IPTORRENTS_FREELEECH, IPTORRENTS_OPTIONS, KAT, KAT_VERIFIED, KAT_OPTIONS, PUBLICHD, PUBLICHD_OPTIONS, SCC, SCC_USERNAME, SCC_PASSWORD, SCC_OPTIONS, HDTORRENTS, HDTORRENTS_USERNAME, HDTORRENTS_PASSWORD, HDTORRENTS_UID, HDTORRENTS_HASH, HDTORRENTS_OPTIONS, TORRENTDAY, TORRENTDAY_USERNAME, TORRENTDAY_PASSWORD, TORRENTDAY_UID, TORRENTDAY_HASH, TORRENTDAY_FREELEECH, TORRENTDAY_OPTIONS, \
|
||||
HDBITS, HDBITS_USERNAME, HDBITS_PASSKEY, HDBITS_OPTIONS, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, SEARCH_FREQUENCY, DEFAULT_SEARCH_FREQUENCY, BACKLOG_SEARCH_FREQUENCY, \
|
||||
NEXTGEN, NEXTGEN_USERNAME, NEXTGEN_PASSWORD, NEXTGEN_FREELEECH, NEXTGEN_OPTIONS, \
|
||||
QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, \
|
||||
GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, \
|
||||
USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, \
|
||||
USE_PYTIVO, PYTIVO_NOTIFY_ONSNATCH, PYTIVO_NOTIFY_ONDOWNLOAD, PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD, PYTIVO_UPDATE_LIBRARY, PYTIVO_HOST, PYTIVO_SHARE_NAME, PYTIVO_TIVO_NAME, \
|
||||
USE_NMA, NMA_NOTIFY_ONSNATCH, NMA_NOTIFY_ONDOWNLOAD, NMA_NOTIFY_ONSUBTITLEDOWNLOAD, NMA_API, NMA_PRIORITY, \
|
||||
USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \
|
||||
USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \
|
||||
versionCheckScheduler, VERSION_NOTIFY, PROCESS_AUTOMATICALLY, UNPACK, \
|
||||
KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_SEARCH_FREQUENCY, \
|
||||
showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \
|
||||
NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_STRIP_YEAR, \
|
||||
RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
|
||||
WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \
|
||||
EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \
|
||||
USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \
|
||||
USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, \
|
||||
USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, USE_SYNOINDEX, \
|
||||
USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \
|
||||
USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
|
||||
USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
|
||||
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \
|
||||
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
|
||||
METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
|
||||
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
|
||||
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY
|
||||
|
||||
if __INITIALIZED__:
|
||||
return False
|
||||
|
@ -510,13 +514,13 @@ def initialize(consoleLogging=True):
|
|||
ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
|
||||
# put the log dir inside the data dir, unless an absolute path
|
||||
LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR))
|
||||
|
||||
|
||||
if not helpers.makeDir(LOG_DIR):
|
||||
logger.log(u"!!! No log folder, logging to screen only!", logger.ERROR)
|
||||
|
||||
|
||||
SOCKET_TIMEOUT = check_setting_int(CFG, 'General', 'socket_timeout', 30)
|
||||
socket.setdefaulttimeout(SOCKET_TIMEOUT)
|
||||
|
||||
|
||||
try:
|
||||
WEB_PORT = check_setting_int(CFG, 'General', 'web_port', 8081)
|
||||
except:
|
||||
|
@ -533,14 +537,12 @@ def initialize(consoleLogging=True):
|
|||
WEB_USERNAME = check_setting_str(CFG, 'General', 'web_username', '')
|
||||
WEB_PASSWORD = check_setting_str(CFG, 'General', 'web_password', '')
|
||||
LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
|
||||
|
||||
|
||||
|
||||
LOCALHOST_IP = check_setting_str(CFG, 'General', 'localhost_ip', '')
|
||||
ANON_REDIRECT = check_setting_str(CFG, 'General', 'anon_redirect', 'http://dereferer.org/?')
|
||||
# attempt to help prevent users from breaking links by using a bad url
|
||||
if not ANON_REDIRECT.endswith('?'):
|
||||
ANON_REDIRECT = ''
|
||||
|
||||
|
||||
UPDATE_SHOWS_ON_START = bool(check_setting_int(CFG, 'General', 'update_shows_on_start', 0))
|
||||
SORT_ARTICLE = bool(check_setting_int(CFG, 'General', 'sort_article', 0))
|
||||
|
@ -661,7 +663,7 @@ def initialize(consoleLogging=True):
|
|||
NEXTGEN = bool(check_setting_int(CFG, 'NEXTGEN', 'nextgen', 0))
|
||||
NEXTGEN_USERNAME = check_setting_str(CFG, 'NEXTGEN', 'nextgen_username', '')
|
||||
NEXTGEN_PASSWORD = check_setting_str(CFG, 'NEXTGEN', 'nextgen_password', '')
|
||||
NEXTGEN_OPTIONS = check_setting_str(CFG, 'NEXTGEN', 'nextgen_options', '')
|
||||
NEXTGEN_OPTIONS = check_setting_str(CFG, 'NEXTGEN', 'nextgen_options', '')
|
||||
|
||||
KAT = bool(check_setting_int(CFG, 'KAT', 'kat', 0))
|
||||
KAT_VERIFIED = bool(check_setting_int(CFG, 'KAT', 'kat_verified', 1))
|
||||
|
@ -690,7 +692,7 @@ def initialize(consoleLogging=True):
|
|||
HDBITS_USERNAME = check_setting_str(CFG, 'HDBITS', 'hdbits_username', '')
|
||||
HDBITS_PASSKEY = check_setting_str(CFG, 'HDBITS', 'hdbits_passkey', '')
|
||||
HDBITS_OPTIONS = check_setting_str(CFG, 'HDBITS', 'hdbits_options', '')
|
||||
|
||||
|
||||
NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0))
|
||||
NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '')
|
||||
NZBS_HASH = check_setting_str(CFG, 'NZBs', 'nzbs_hash', '')
|
||||
|
@ -711,7 +713,7 @@ def initialize(consoleLogging=True):
|
|||
SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', 'tv')
|
||||
SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
|
||||
|
||||
NZBGET_USERNAME = check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget')
|
||||
NZBGET_USERNAME = check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget')
|
||||
NZBGET_PASSWORD = check_setting_str(CFG, 'NZBget', 'nzbget_password', 'tegbzn6789')
|
||||
NZBGET_CATEGORY = check_setting_str(CFG, 'NZBget', 'nzbget_category', 'tv')
|
||||
NZBGET_HOST = check_setting_str(CFG, 'NZBget', 'nzbget_host', '')
|
||||
|
@ -763,7 +765,8 @@ def initialize(consoleLogging=True):
|
|||
USE_TWITTER = bool(check_setting_int(CFG, 'Twitter', 'use_twitter', 0))
|
||||
TWITTER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsnatch', 0))
|
||||
TWITTER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_ondownload', 0))
|
||||
TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsubtitledownload', 0))
|
||||
TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'Twitter', 'twitter_notify_onsubtitledownload', 0))
|
||||
TWITTER_USERNAME = check_setting_str(CFG, 'Twitter', 'twitter_username', '')
|
||||
TWITTER_PASSWORD = check_setting_str(CFG, 'Twitter', 'twitter_password', '')
|
||||
TWITTER_PREFIX = check_setting_str(CFG, 'Twitter', 'twitter_prefix', 'Sick Beard')
|
||||
|
@ -777,13 +780,15 @@ def initialize(consoleLogging=True):
|
|||
USE_PUSHOVER = bool(check_setting_int(CFG, 'Pushover', 'use_pushover', 0))
|
||||
PUSHOVER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsnatch', 0))
|
||||
PUSHOVER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_ondownload', 0))
|
||||
PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0))
|
||||
PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0))
|
||||
PUSHOVER_USERKEY = check_setting_str(CFG, 'Pushover', 'pushover_userkey', '')
|
||||
|
||||
USE_LIBNOTIFY = bool(check_setting_int(CFG, 'Libnotify', 'use_libnotify', 0))
|
||||
LIBNOTIFY_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsnatch', 0))
|
||||
LIBNOTIFY_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_ondownload', 0))
|
||||
LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsubtitledownload', 0))
|
||||
LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsubtitledownload', 0))
|
||||
|
||||
USE_NMJ = bool(check_setting_int(CFG, 'NMJ', 'use_nmj', 0))
|
||||
NMJ_HOST = check_setting_str(CFG, 'NMJ', 'nmj_host', '')
|
||||
|
@ -798,9 +803,12 @@ def initialize(consoleLogging=True):
|
|||
USE_SYNOINDEX = bool(check_setting_int(CFG, 'Synology', 'use_synoindex', 0))
|
||||
|
||||
USE_SYNOLOGYNOTIFIER = bool(check_setting_int(CFG, 'SynologyNotifier', 'use_synologynotifier', 0))
|
||||
SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsnatch', 0))
|
||||
SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_ondownload', 0))
|
||||
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsubtitledownload', 0))
|
||||
SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = bool(
|
||||
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsnatch', 0))
|
||||
SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_ondownload', 0))
|
||||
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsubtitledownload', 0))
|
||||
|
||||
USE_TRAKT = bool(check_setting_int(CFG, 'Trakt', 'use_trakt', 0))
|
||||
TRAKT_USERNAME = check_setting_str(CFG, 'Trakt', 'trakt_username', '')
|
||||
|
@ -831,13 +839,15 @@ def initialize(consoleLogging=True):
|
|||
USE_PUSHALOT = bool(check_setting_int(CFG, 'Pushalot', 'use_pushalot', 0))
|
||||
PUSHALOT_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsnatch', 0))
|
||||
PUSHALOT_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_ondownload', 0))
|
||||
PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsubtitledownload', 0))
|
||||
PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsubtitledownload', 0))
|
||||
PUSHALOT_AUTHORIZATIONTOKEN = check_setting_str(CFG, 'Pushalot', 'pushalot_authorizationtoken', '')
|
||||
|
||||
USE_PUSHBULLET = bool(check_setting_int(CFG, 'Pushbullet', 'use_pushbullet', 0))
|
||||
PUSHBULLET_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsnatch', 0))
|
||||
PUSHBULLET_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_ondownload', 0))
|
||||
PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsubtitledownload', 0))
|
||||
PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsubtitledownload', 0))
|
||||
PUSHBULLET_API = check_setting_str(CFG, 'Pushbullet', 'pushbullet_api', '')
|
||||
PUSHBULLET_DEVICE = check_setting_str(CFG, 'Pushbullet', 'pushbullet_device', '')
|
||||
|
||||
|
@ -859,7 +869,9 @@ def initialize(consoleLogging=True):
|
|||
SUBTITLES_LANGUAGES = []
|
||||
SUBTITLES_DIR = check_setting_str(CFG, 'Subtitles', 'subtitles_dir', '')
|
||||
SUBTITLES_SERVICES_LIST = check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_LIST', '').split(',')
|
||||
SUBTITLES_SERVICES_ENABLED = [int(x) for x in check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_ENABLED', '').split('|') if x]
|
||||
SUBTITLES_SERVICES_ENABLED = [int(x) for x in
|
||||
check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_ENABLED', '').split('|')
|
||||
if x]
|
||||
SUBTITLES_DEFAULT = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_default', 0))
|
||||
SUBTITLES_HISTORY = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_history', 0))
|
||||
SUBTITLES_FINDER_FREQUENCY = check_setting_int(CFG, 'Subtitles', 'subtitles_finder_frequency', 1)
|
||||
|
@ -870,10 +882,11 @@ def initialize(consoleLogging=True):
|
|||
GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '')
|
||||
|
||||
IGNORE_WORDS = check_setting_str(CFG, 'General', 'ignore_words', IGNORE_WORDS)
|
||||
|
||||
|
||||
CALENDAR_UNPROTECTED = bool(check_setting_int(CFG, 'General', 'calendar_unprotected', 0))
|
||||
|
||||
EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'General', 'extra_scripts', '').split('|') if x.strip()]
|
||||
EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'General', 'extra_scripts', '').split('|') if
|
||||
x.strip()]
|
||||
|
||||
USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0))
|
||||
|
||||
|
@ -895,18 +908,18 @@ def initialize(consoleLogging=True):
|
|||
COMING_EPS_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'coming_eps_missed_range', 7)
|
||||
DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
|
||||
TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
|
||||
TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S",u"")
|
||||
TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")
|
||||
|
||||
NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
|
||||
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
|
||||
|
||||
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
|
||||
|
||||
torrentRssData = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '')
|
||||
torrentRssProviderList = providers.getTorrentRssProviderList(torrentRssData)
|
||||
|
||||
if not os.path.isfile(CONFIG_FILE):
|
||||
logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG)
|
||||
save_config()
|
||||
|
||||
|
||||
# start up all the threads
|
||||
logger.sb_log_instance.initLogging(consoleLogging=consoleLogging)
|
||||
|
||||
|
@ -934,8 +947,7 @@ def initialize(consoleLogging=True):
|
|||
(METADATA_PS3, metadata.ps3),
|
||||
(METADATA_WDTV, metadata.wdtv),
|
||||
(METADATA_TIVO, metadata.tivo),
|
||||
]:
|
||||
|
||||
]:
|
||||
(cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
|
||||
tmp_provider = cur_metadata_class.metadata_class()
|
||||
tmp_provider.set_config(cur_metadata_config)
|
||||
|
@ -954,59 +966,59 @@ def initialize(consoleLogging=True):
|
|||
# the interval for this is stored inside the ShowUpdater class
|
||||
showUpdaterInstance = showUpdater.ShowUpdater()
|
||||
showUpdateScheduler = scheduler.Scheduler(showUpdaterInstance,
|
||||
cycleTime=showUpdaterInstance.updateInterval,
|
||||
threadName="SHOWUPDATER",
|
||||
runImmediately=False)
|
||||
cycleTime=showUpdaterInstance.updateInterval,
|
||||
threadName="SHOWUPDATER",
|
||||
runImmediately=False)
|
||||
|
||||
versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(),
|
||||
cycleTime=datetime.timedelta(hours=12),
|
||||
threadName="CHECKVERSION",
|
||||
runImmediately=True)
|
||||
cycleTime=datetime.timedelta(hours=12),
|
||||
threadName="CHECKVERSION",
|
||||
runImmediately=True)
|
||||
|
||||
showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SHOWQUEUE",
|
||||
silent=True)
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SHOWQUEUE",
|
||||
silent=True)
|
||||
|
||||
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SEARCHQUEUE",
|
||||
silent=True)
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SEARCHQUEUE",
|
||||
silent=True)
|
||||
|
||||
properFinderInstance = properFinder.ProperFinder()
|
||||
properFinderScheduler = scheduler.Scheduler(properFinderInstance,
|
||||
cycleTime=properFinderInstance.updateInterval,
|
||||
threadName="FINDPROPERS",
|
||||
runImmediately=True)
|
||||
cycleTime=properFinderInstance.updateInterval,
|
||||
threadName="FINDPROPERS",
|
||||
runImmediately=True)
|
||||
if not DOWNLOAD_PROPERS:
|
||||
properFinderScheduler.silent = True
|
||||
|
||||
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
|
||||
cycleTime=datetime.timedelta(minutes=10),
|
||||
threadName="POSTPROCESSER",
|
||||
runImmediately=True)
|
||||
cycleTime=datetime.timedelta(minutes=10),
|
||||
threadName="POSTPROCESSER",
|
||||
runImmediately=True)
|
||||
if not PROCESS_AUTOMATICALLY:
|
||||
autoPostProcesserScheduler.silent = True
|
||||
|
||||
|
||||
traktWatchListCheckerSchedular = scheduler.Scheduler(traktWatchListChecker.TraktChecker(),
|
||||
cycleTime=datetime.timedelta(hours=1),
|
||||
threadName="TRAKTWATCHLIST",
|
||||
runImmediately=True)
|
||||
|
||||
cycleTime=datetime.timedelta(hours=1),
|
||||
threadName="TRAKTWATCHLIST",
|
||||
runImmediately=True)
|
||||
|
||||
if not USE_TRAKT:
|
||||
traktWatchListCheckerSchedular.silent = True
|
||||
|
||||
|
||||
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
|
||||
cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
|
||||
cycleTime=datetime.timedelta(
|
||||
minutes=get_backlog_cycle_time()),
|
||||
threadName="BACKLOG",
|
||||
runImmediately=True)
|
||||
backlogSearchScheduler.action.cycleTime = BACKLOG_SEARCH_FREQUENCY
|
||||
|
||||
|
||||
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
|
||||
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
|
||||
threadName="FINDSUBTITLES",
|
||||
runImmediately=True)
|
||||
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
|
||||
threadName="FINDSUBTITLES",
|
||||
runImmediately=True)
|
||||
|
||||
if not USE_SUBTITLES:
|
||||
subtitlesFinderScheduler.silent = True
|
||||
|
@ -1017,13 +1029,13 @@ def initialize(consoleLogging=True):
|
|||
__INITIALIZED__ = True
|
||||
return True
|
||||
|
||||
def start():
|
||||
|
||||
def start():
|
||||
global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, \
|
||||
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
|
||||
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
||||
subtitlesFinderScheduler, started, USE_SUBTITLES, \
|
||||
traktWatchListCheckerSchedular, started
|
||||
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
|
||||
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
||||
subtitlesFinderScheduler, started, USE_SUBTITLES, \
|
||||
traktWatchListCheckerSchedular, started
|
||||
|
||||
with INIT_LOCK:
|
||||
|
||||
|
@ -1062,12 +1074,12 @@ def start():
|
|||
|
||||
started = True
|
||||
|
||||
def halt ():
|
||||
|
||||
def halt():
|
||||
global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \
|
||||
showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
||||
subtitlesFinderScheduler, started, \
|
||||
traktWatchListCheckerSchedular
|
||||
showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
||||
subtitlesFinderScheduler, started, \
|
||||
traktWatchListCheckerSchedular
|
||||
|
||||
with INIT_LOCK:
|
||||
|
||||
|
@ -1147,7 +1159,6 @@ def halt ():
|
|||
except:
|
||||
pass
|
||||
|
||||
|
||||
__INITIALIZED__ = False
|
||||
|
||||
|
||||
|
@ -1158,7 +1169,6 @@ def sig_handler(signum=None, frame=None):
|
|||
|
||||
|
||||
def saveAll():
|
||||
|
||||
global showList
|
||||
|
||||
# write all shows
|
||||
|
@ -1172,7 +1182,6 @@ def saveAll():
|
|||
|
||||
|
||||
def saveAndShutdown(restart=False):
|
||||
|
||||
halt()
|
||||
|
||||
saveAll()
|
||||
|
@ -1197,7 +1206,8 @@ def saveAndShutdown(restart=False):
|
|||
popen_list = [os.path.join(PROG_DIR, 'updater.exe'), str(PID), sys.executable]
|
||||
else:
|
||||
logger.log(u"Unknown SB launch method, please file a bug report about this", logger.ERROR)
|
||||
popen_list = [sys.executable, os.path.join(PROG_DIR, 'updater.py'), str(PID), sys.executable, MY_FULLNAME ]
|
||||
popen_list = [sys.executable, os.path.join(PROG_DIR, 'updater.py'), str(PID), sys.executable,
|
||||
MY_FULLNAME]
|
||||
|
||||
if popen_list:
|
||||
popen_list += MY_ARGS
|
||||
|
@ -1212,20 +1222,24 @@ def saveAndShutdown(restart=False):
|
|||
|
||||
def invoke_command(to_call, *args, **kwargs):
|
||||
global invoked_command
|
||||
|
||||
def delegate():
|
||||
to_call(*args, **kwargs)
|
||||
|
||||
invoked_command = delegate
|
||||
logger.log(u"Placed invoked command: "+repr(invoked_command)+" for "+repr(to_call)+" with "+repr(args)+" and "+repr(kwargs), logger.DEBUG)
|
||||
logger.log(u"Placed invoked command: " + repr(invoked_command) + " for " + repr(to_call) + " with " + repr(
|
||||
args) + " and " + repr(kwargs), logger.DEBUG)
|
||||
|
||||
|
||||
def invoke_restart(soft=True):
|
||||
invoke_command(restart, soft=soft)
|
||||
|
||||
|
||||
def invoke_shutdown():
|
||||
invoke_command(saveAndShutdown)
|
||||
|
||||
|
||||
def restart(soft=True):
|
||||
|
||||
if soft:
|
||||
halt()
|
||||
saveAll()
|
||||
|
@ -1238,12 +1252,10 @@ def restart(soft=True):
|
|||
saveAndShutdown(restart=True)
|
||||
|
||||
|
||||
|
||||
def save_config():
|
||||
|
||||
new_config = ConfigObj()
|
||||
new_config.filename = CONFIG_FILE
|
||||
|
||||
|
||||
# For passwords you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
|
||||
new_config['General'] = {}
|
||||
new_config['General']['config_version'] = CONFIG_VERSION
|
||||
|
@ -1354,8 +1366,8 @@ def save_config():
|
|||
new_config['NEXTGEN']['nextgen'] = int(NEXTGEN)
|
||||
new_config['NEXTGEN']['nextgen_username'] = NEXTGEN_USERNAME
|
||||
new_config['NEXTGEN']['nextgen_password'] = helpers.encrypt(NEXTGEN_PASSWORD, ENCRYPTION_VERSION)
|
||||
new_config['NEXTGEN']['nextgen_options'] = NEXTGEN_OPTIONS
|
||||
|
||||
new_config['NEXTGEN']['nextgen_options'] = NEXTGEN_OPTIONS
|
||||
|
||||
new_config['KAT'] = {}
|
||||
new_config['KAT']['kat'] = int(KAT)
|
||||
new_config['KAT']['kat_verified'] = int(KAT_VERIFIED)
|
||||
|
@ -1417,7 +1429,7 @@ def save_config():
|
|||
|
||||
new_config['NZBget'] = {}
|
||||
|
||||
new_config['NZBget']['nzbget_username'] = NZBGET_USERNAME
|
||||
new_config['NZBget']['nzbget_username'] = NZBGET_USERNAME
|
||||
new_config['NZBget']['nzbget_password'] = helpers.encrypt(NZBGET_PASSWORD, ENCRYPTION_VERSION)
|
||||
new_config['NZBget']['nzbget_category'] = NZBGET_CATEGORY
|
||||
new_config['NZBget']['nzbget_host'] = NZBGET_HOST
|
||||
|
@ -1519,7 +1531,8 @@ def save_config():
|
|||
new_config['SynologyNotifier']['use_synologynotifier'] = int(USE_SYNOLOGYNOTIFIER)
|
||||
new_config['SynologyNotifier']['synologynotifier_notify_onsnatch'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH)
|
||||
new_config['SynologyNotifier']['synologynotifier_notify_ondownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD)
|
||||
new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD)
|
||||
new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int(
|
||||
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD)
|
||||
|
||||
new_config['Trakt'] = {}
|
||||
new_config['Trakt']['use_trakt'] = int(USE_TRAKT)
|
||||
|
@ -1564,7 +1577,6 @@ def save_config():
|
|||
new_config['Pushbullet']['pushbullet_api'] = PUSHBULLET_API
|
||||
new_config['Pushbullet']['pushbullet_device'] = PUSHBULLET_DEVICE
|
||||
|
||||
|
||||
new_config['Email'] = {}
|
||||
new_config['Email']['use_email'] = int(USE_EMAIL)
|
||||
new_config['Email']['email_notify_onsnatch'] = int(EMAIL_NOTIFY_ONSNATCH)
|
||||
|
@ -1606,7 +1618,7 @@ def save_config():
|
|||
new_config['Subtitles']['subtitles_history'] = int(SUBTITLES_HISTORY)
|
||||
new_config['Subtitles']['subtitles_finder_frequency'] = int(SUBTITLES_FINDER_FREQUENCY)
|
||||
|
||||
new_config['FailedDownloads']= {}
|
||||
new_config['FailedDownloads'] = {}
|
||||
new_config['FailedDownloads']['use_failed_downloads'] = int(USE_FAILED_DOWNLOADS)
|
||||
new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED)
|
||||
|
||||
|
@ -1628,12 +1640,12 @@ def launchBrowser(startPort=None):
|
|||
except:
|
||||
logger.log(u"Unable to launch a browser", logger.ERROR)
|
||||
|
||||
def getEpList(epIDs, showid=None):
|
||||
|
||||
def getEpList(epIDs, showid=None):
|
||||
if epIDs == None or len(epIDs) == 0:
|
||||
return []
|
||||
|
||||
query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?']*len(epIDs)),)
|
||||
query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?'] * len(epIDs)),)
|
||||
params = epIDs
|
||||
|
||||
if showid != None:
|
||||
|
|
|
@ -24,18 +24,21 @@ from sickbeard import logger
|
|||
from sickbeard import encodingKludge as ek
|
||||
from sickbeard import processTV
|
||||
|
||||
class PostProcesser():
|
||||
|
||||
class PostProcesser():
|
||||
def run(self):
|
||||
if not sickbeard.PROCESS_AUTOMATICALLY:
|
||||
return
|
||||
|
||||
if not ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
|
||||
logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist", logger.ERROR)
|
||||
logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist",
|
||||
logger.ERROR)
|
||||
return
|
||||
|
||||
if not ek.ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR):
|
||||
logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)", logger.ERROR)
|
||||
logger.log(
|
||||
u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)",
|
||||
logger.ERROR)
|
||||
return
|
||||
|
||||
processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
|
||||
|
|
|
@ -38,7 +38,7 @@ def getWinDrives():
|
|||
assert os.name == 'nt'
|
||||
|
||||
drives = []
|
||||
bitmask = windll.kernel32.GetLogicalDrives() #@UndefinedVariable
|
||||
bitmask = windll.kernel32.GetLogicalDrives() #@UndefinedVariable
|
||||
for letter in string.uppercase:
|
||||
if bitmask & 1:
|
||||
drives.append(letter)
|
||||
|
@ -79,26 +79,27 @@ def foldersAtPath(path, includeParent=False):
|
|||
if path == parentPath and os.name == 'nt':
|
||||
parentPath = ""
|
||||
|
||||
fileList = [{ 'name': filename, 'path': ek.ek(os.path.join, path, filename) } for filename in ek.ek(os.listdir, path)]
|
||||
fileList = [{'name': filename, 'path': ek.ek(os.path.join, path, filename)} for filename in ek.ek(os.listdir, path)]
|
||||
fileList = filter(lambda entry: ek.ek(os.path.isdir, entry['path']), fileList)
|
||||
|
||||
# prune out directories to proect the user from doing stupid things (already lower case the dir to reduce calls)
|
||||
hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler", "system volume information", "temporary internet files"] # windows specific
|
||||
hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"] # osx specific
|
||||
hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler",
|
||||
"system volume information", "temporary internet files"] # windows specific
|
||||
hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"] # osx specific
|
||||
fileList = filter(lambda entry: entry['name'].lower() not in hideList, fileList)
|
||||
|
||||
fileList = sorted(fileList, lambda x, y: cmp(os.path.basename(x['name']).lower(), os.path.basename(y['path']).lower()))
|
||||
fileList = sorted(fileList,
|
||||
lambda x, y: cmp(os.path.basename(x['name']).lower(), os.path.basename(y['path']).lower()))
|
||||
|
||||
entries = [{'current_path': path}]
|
||||
if includeParent and parentPath != path:
|
||||
entries.append({ 'name': "..", 'path': parentPath })
|
||||
entries.append({'name': "..", 'path': parentPath})
|
||||
entries.extend(fileList)
|
||||
|
||||
return entries
|
||||
|
||||
|
||||
class WebFileBrowser:
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, path=''):
|
||||
cherrypy.response.headers['Content-Type'] = "application/json"
|
||||
|
@ -108,4 +109,4 @@ class WebFileBrowser:
|
|||
def complete(self, term):
|
||||
cherrypy.response.headers['Content-Type'] = "application/json"
|
||||
paths = [entry['path'] for entry in foldersAtPath(os.path.dirname(term)) if 'path' in entry]
|
||||
return json.dumps( paths )
|
||||
return json.dumps(paths)
|
||||
|
|
|
@ -25,9 +25,11 @@ import datetime
|
|||
|
||||
from common import USER_AGENT, Quality
|
||||
|
||||
|
||||
class SickBeardURLopener(urllib.FancyURLopener):
|
||||
version = USER_AGENT
|
||||
|
||||
|
||||
class AuthURLOpener(SickBeardURLopener):
|
||||
"""
|
||||
URLOpener class that supports http auth without needing interactive password entry.
|
||||
|
@ -36,13 +38,14 @@ class AuthURLOpener(SickBeardURLopener):
|
|||
user: username to use for HTTP auth
|
||||
pw: password to use for HTTP auth
|
||||
"""
|
||||
|
||||
def __init__(self, user, pw):
|
||||
self.username = user
|
||||
self.password = pw
|
||||
|
||||
# remember if we've tried the username/password before
|
||||
self.numTries = 0
|
||||
|
||||
|
||||
# call the base class
|
||||
urllib.FancyURLopener.__init__(self)
|
||||
|
||||
|
@ -56,7 +59,7 @@ class AuthURLOpener(SickBeardURLopener):
|
|||
if self.numTries == 0:
|
||||
self.numTries = 1
|
||||
return (self.username, self.password)
|
||||
|
||||
|
||||
# if we've tried before then return blank which cancels the request
|
||||
else:
|
||||
return ('', '')
|
||||
|
@ -66,6 +69,7 @@ class AuthURLOpener(SickBeardURLopener):
|
|||
self.numTries = 0
|
||||
return SickBeardURLopener.open(self, url)
|
||||
|
||||
|
||||
class SearchResult:
|
||||
"""
|
||||
Represents a search result from an indexer.
|
||||
|
@ -112,18 +116,21 @@ class SearchResult:
|
|||
def fileName(self):
|
||||
return self.episodes[0].prettyName() + "." + self.resultType
|
||||
|
||||
|
||||
class NZBSearchResult(SearchResult):
|
||||
"""
|
||||
Regular NZB result with an URL to the NZB
|
||||
"""
|
||||
resultType = "nzb"
|
||||
|
||||
|
||||
class NZBDataSearchResult(SearchResult):
|
||||
"""
|
||||
NZB result where the actual NZB XML data is stored in the extraInfo
|
||||
"""
|
||||
resultType = "nzbdata"
|
||||
|
||||
|
||||
class TorrentSearchResult(SearchResult):
|
||||
"""
|
||||
Torrent result with an URL to the torrent
|
||||
|
@ -131,27 +138,47 @@ class TorrentSearchResult(SearchResult):
|
|||
resultType = "torrent"
|
||||
|
||||
|
||||
class AllShowsListUI:
|
||||
"""
|
||||
This class is for tvdb-api. Instead of prompting with a UI to pick the
|
||||
desired result out of a list of shows it tries to be smart about it
|
||||
based on what shows are in SB.
|
||||
"""
|
||||
|
||||
def __init__(self, config, log=None):
|
||||
self.config = config
|
||||
self.log = log
|
||||
|
||||
def selectSeries(self, allSeries):
|
||||
# get all available shows
|
||||
if allSeries:
|
||||
return allSeries
|
||||
|
||||
|
||||
class ShowListUI:
|
||||
"""
|
||||
This class is for tvdb-api. Instead of prompting with a UI to pick the
|
||||
desired result out of a list of shows it tries to be smart about it
|
||||
based on what shows are in SB.
|
||||
"""
|
||||
|
||||
def __init__(self, config, log=None):
|
||||
self.config = config
|
||||
self.log = log
|
||||
|
||||
def selectSeries(self, allSeries):
|
||||
idList = [x.indexerid for x in sickbeard.showList]
|
||||
if sickbeard.showList:
|
||||
idList = [x.indexerid for x in sickbeard.showList]
|
||||
|
||||
# try to pick a show that's in my show list
|
||||
for curShow in allSeries:
|
||||
if int(curShow['id']) in idList:
|
||||
return curShow
|
||||
# try to pick a show that's in my show list
|
||||
for curShow in allSeries:
|
||||
if int(curShow['id']) in idList:
|
||||
return curShow
|
||||
|
||||
# if nothing matches then just go with the first match I guess
|
||||
# if nothing matches then return everything
|
||||
return allSeries[0]
|
||||
|
||||
|
||||
class Proper:
|
||||
def __init__(self, name, url, date):
|
||||
self.name = name
|
||||
|
@ -166,7 +193,8 @@ class Proper:
|
|||
self.episode = -1
|
||||
|
||||
def __str__(self):
|
||||
return str(self.date)+" "+self.name+" "+str(self.season)+"x"+str(self.episode)+" of "+str(self.indexerid+" from "+self.indexer)
|
||||
return str(self.date) + " " + self.name + " " + str(self.season) + "x" + str(self.episode) + " of " + str(
|
||||
self.indexerid) + " from " + str(sickbeard.indexerApi(self.indexer).name)
|
||||
|
||||
|
||||
class ErrorViewer():
|
||||
|
@ -188,10 +216,12 @@ class ErrorViewer():
|
|||
def clear():
|
||||
ErrorViewer.errors = []
|
||||
|
||||
|
||||
class UIError():
|
||||
"""
|
||||
Represents an error to be displayed in the web UI.
|
||||
"""
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
self.time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
|
|
@ -21,7 +21,7 @@ __all__ = ['utorrent',
|
|||
'deluge',
|
||||
'download_station',
|
||||
'rtorrent'
|
||||
]
|
||||
]
|
||||
|
||||
import sickbeard
|
||||
|
||||
|
@ -62,24 +62,24 @@ http_error_code = {
|
|||
505: 'HTTP Version Not Supported',
|
||||
}
|
||||
|
||||
default_host = {'utorrent':'http://localhost:8000',
|
||||
'transmission' :'http://localhost:9091',
|
||||
'deluge':'http://localhost:8112',
|
||||
default_host = {'utorrent': 'http://localhost:8000',
|
||||
'transmission': 'http://localhost:9091',
|
||||
'deluge': 'http://localhost:8112',
|
||||
'download_station': 'http://localhost:5000',
|
||||
'rtorrent': 'scgi://localhost:5000',
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def getClientModule(name):
|
||||
|
||||
name = name.lower()
|
||||
prefix = "sickbeard.clients."
|
||||
|
||||
return __import__(prefix+name, fromlist=__all__)
|
||||
|
||||
return __import__(prefix + name, fromlist=__all__)
|
||||
|
||||
|
||||
def getClientIstance(name):
|
||||
|
||||
module = getClientModule(name)
|
||||
className = module.api.__class__.__name__
|
||||
|
||||
|
||||
return getattr(module, className)
|
||||
|
|
@ -23,44 +23,43 @@ import sickbeard
|
|||
from sickbeard import logger
|
||||
from sickbeard.clients.generic import GenericClient
|
||||
|
||||
class DelugeAPI(GenericClient):
|
||||
|
||||
class DelugeAPI(GenericClient):
|
||||
def __init__(self, host=None, username=None, password=None):
|
||||
|
||||
super(DelugeAPI, self).__init__('Deluge', host, username, password)
|
||||
|
||||
|
||||
self.url = self.host + 'json'
|
||||
|
||||
|
||||
def _get_auth(self):
|
||||
|
||||
|
||||
post_data = json.dumps({"method": "auth.login",
|
||||
"params": [self.password],
|
||||
"id": 1
|
||||
})
|
||||
try:
|
||||
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
|
||||
except:
|
||||
return None
|
||||
|
||||
self.auth = self.response.json()["result"]
|
||||
|
||||
|
||||
post_data = json.dumps({"method": "web.connected",
|
||||
"params": [],
|
||||
"id": 10
|
||||
})
|
||||
})
|
||||
try:
|
||||
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
|
||||
except:
|
||||
return None
|
||||
|
||||
|
||||
self.auth = self.response.json()["result"]
|
||||
|
||||
post_data = json.dumps({"method": "web.connected",
|
||||
"params": [],
|
||||
"id": 10
|
||||
})
|
||||
try:
|
||||
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
|
||||
except:
|
||||
return None
|
||||
|
||||
connected = self.response.json()['result']
|
||||
|
||||
|
||||
if not connected:
|
||||
post_data = json.dumps({"method": "web.get_hosts",
|
||||
"params": [],
|
||||
"id": 11
|
||||
})
|
||||
})
|
||||
try:
|
||||
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
|
||||
except:
|
||||
|
@ -69,21 +68,20 @@ class DelugeAPI(GenericClient):
|
|||
if len(hosts) == 0:
|
||||
logger.log(self.name + u': WebUI does not contain daemons', logger.ERROR)
|
||||
return None
|
||||
|
||||
|
||||
post_data = json.dumps({"method": "web.connect",
|
||||
"params": [hosts[0][0]],
|
||||
"id": 11
|
||||
})
|
||||
})
|
||||
try:
|
||||
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
|
||||
except:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
post_data = json.dumps({"method": "web.connected",
|
||||
"params": [],
|
||||
"id": 10
|
||||
})
|
||||
})
|
||||
try:
|
||||
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
|
||||
except:
|
||||
|
@ -93,86 +91,90 @@ class DelugeAPI(GenericClient):
|
|||
if not connected:
|
||||
logger.log(self.name + u': WebUI could not connect to daemon', logger.ERROR)
|
||||
return None
|
||||
|
||||
|
||||
return self.auth
|
||||
|
||||
|
||||
def _add_torrent_uri(self, result):
|
||||
|
||||
post_data = json.dumps({"method": "core.add_torrent_magnet",
|
||||
"params": [result.url,{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
|
||||
"params": [result.url, {"move_completed": "true",
|
||||
"move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
|
||||
"id": 2
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
|
||||
result.hash = self.response.json()['result']
|
||||
|
||||
|
||||
return self.response.json()['result']
|
||||
|
||||
|
||||
def _add_torrent_file(self, result):
|
||||
|
||||
post_data = json.dumps({"method": "core.add_torrent_file",
|
||||
"params": [result.name + '.torrent', b64encode(result.content),{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
|
||||
"params": [result.name + '.torrent', b64encode(result.content),
|
||||
{"move_completed": "true",
|
||||
"move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
|
||||
"id": 2
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
|
||||
result.hash = self.response.json()['result']
|
||||
|
||||
|
||||
return self.response.json()['result']
|
||||
|
||||
|
||||
def _set_torrent_label(self, result):
|
||||
|
||||
|
||||
label = sickbeard.TORRENT_LABEL.lower()
|
||||
if label:
|
||||
# check if label already exists and create it if not
|
||||
post_data = json.dumps({"method": 'label.get_labels',
|
||||
"params": [],
|
||||
"id": 3
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
labels = self.response.json()['result']
|
||||
|
||||
|
||||
if labels != None:
|
||||
if label not in labels:
|
||||
logger.log(self.name + ': ' + label +u" label does not exist in Deluge we must add it", logger.DEBUG)
|
||||
logger.log(self.name + ': ' + label + u" label does not exist in Deluge we must add it",
|
||||
logger.DEBUG)
|
||||
post_data = json.dumps({"method": 'label.add',
|
||||
"params": [label],
|
||||
"id": 4
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
logger.log(self.name + ': ' + label +u" label added to Deluge", logger.DEBUG)
|
||||
|
||||
logger.log(self.name + ': ' + label + u" label added to Deluge", logger.DEBUG)
|
||||
|
||||
# add label to torrent
|
||||
post_data = json.dumps({ "method": 'label.set_torrent',
|
||||
"params": [result.hash, label],
|
||||
"id": 5
|
||||
})
|
||||
post_data = json.dumps({"method": 'label.set_torrent',
|
||||
"params": [result.hash, label],
|
||||
"id": 5
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
logger.log(self.name + ': ' + label +u" label added to torrent", logger.DEBUG)
|
||||
logger.log(self.name + ': ' + label + u" label added to torrent", logger.DEBUG)
|
||||
else:
|
||||
logger.log(self.name + ': ' + u"label plugin not detected", logger.DEBUG)
|
||||
return False
|
||||
|
||||
|
||||
return not self.response.json()['error']
|
||||
|
||||
|
||||
|
||||
def _set_torrent_ratio(self, result):
|
||||
|
||||
if sickbeard.TORRENT_RATIO:
|
||||
post_data = json.dumps({"method": "core.set_torrent_stop_at_ratio",
|
||||
"params": [result.hash, True],
|
||||
"id": 5
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
|
||||
post_data = json.dumps({"method": "core.set_torrent_stop_ratio",
|
||||
"params": [result.hash,float(sickbeard.TORRENT_RATIO)],
|
||||
"params": [result.hash, float(sickbeard.TORRENT_RATIO)],
|
||||
"id": 6
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return not self.response.json()['error']
|
||||
|
||||
|
||||
return True
|
||||
|
||||
def _set_torrent_path(self, result):
|
||||
|
@ -181,30 +183,31 @@ class DelugeAPI(GenericClient):
|
|||
post_data = json.dumps({"method": "core.set_torrent_move_completed",
|
||||
"params": [result.hash, True],
|
||||
"id": 7
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
|
||||
post_data = json.dumps({"method": "core.set_torrent_move_completed_path",
|
||||
"params": [result.hash, sickbeard.TORRENT_PATH],
|
||||
"id": 8
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
|
||||
return not self.response.json()['error']
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _set_torrent_pause(self, result):
|
||||
|
||||
|
||||
if sickbeard.TORRENT_PAUSED:
|
||||
post_data = json.dumps({"method": "core.pause_torrent",
|
||||
"params": [[result.hash]],
|
||||
"id": 9
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return not self.response.json()['error']
|
||||
|
||||
return True
|
||||
|
||||
return True
|
||||
|
||||
|
||||
api = DelugeAPI()
|
|
@ -23,49 +23,50 @@
|
|||
import sickbeard
|
||||
from sickbeard.clients.generic import GenericClient
|
||||
|
||||
|
||||
class DownloadStationAPI(GenericClient):
|
||||
|
||||
def __init__(self, host=None, username=None, password=None):
|
||||
|
||||
|
||||
super(DownloadStationAPI, self).__init__('DownloadStation', host, username, password)
|
||||
|
||||
self.url = self.host + 'webapi/DownloadStation/task.cgi'
|
||||
|
||||
|
||||
def _get_auth(self):
|
||||
|
||||
|
||||
auth_url = self.host + 'webapi/auth.cgi?api=SYNO.API.Auth&version=2&method=login&account=' + self.username + '&passwd=' + self.password + '&session=DownloadStation&format=sid'
|
||||
|
||||
|
||||
try:
|
||||
self.response = self.session.get(auth_url)
|
||||
self.auth = self.response.json()['data']['sid']
|
||||
except:
|
||||
return None
|
||||
|
||||
|
||||
return self.auth
|
||||
|
||||
|
||||
def _add_torrent_uri(self, result):
|
||||
|
||||
data = {'api':'SYNO.DownloadStation.Task',
|
||||
'version':'1', 'method':'create',
|
||||
'session':'DownloadStation',
|
||||
'_sid':self.auth,
|
||||
'uri':result.url
|
||||
}
|
||||
|
||||
data = {'api': 'SYNO.DownloadStation.Task',
|
||||
'version': '1', 'method': 'create',
|
||||
'session': 'DownloadStation',
|
||||
'_sid': self.auth,
|
||||
'uri': result.url
|
||||
}
|
||||
self._request(method='post', data=data)
|
||||
|
||||
|
||||
return self.response.json()['success']
|
||||
|
||||
|
||||
def _add_torrent_file(self, result):
|
||||
|
||||
data = {'api':'SYNO.DownloadStation.Task',
|
||||
'version':'1',
|
||||
'method':'create',
|
||||
'session':'DownloadStation',
|
||||
'_sid':self.auth
|
||||
}
|
||||
files = {'file':(result.name + '.torrent', result.content)}
|
||||
data = {'api': 'SYNO.DownloadStation.Task',
|
||||
'version': '1',
|
||||
'method': 'create',
|
||||
'session': 'DownloadStation',
|
||||
'_sid': self.auth
|
||||
}
|
||||
files = {'file': (result.name + '.torrent', result.content)}
|
||||
self._request(method='post', data=data, files=files)
|
||||
|
||||
|
||||
return self.response.json()['success']
|
||||
|
||||
|
||||
api = DownloadStationAPI()
|
||||
|
|
|
@ -10,36 +10,40 @@ from sickbeard.clients import http_error_code
|
|||
from lib.bencode import bencode, bdecode
|
||||
from lib import requests
|
||||
|
||||
|
||||
class GenericClient(object):
|
||||
|
||||
def __init__(self, name, host=None, username=None, password=None):
|
||||
|
||||
self.name = name
|
||||
self.username = sickbeard.TORRENT_USERNAME if username is None else username
|
||||
self.password = sickbeard.TORRENT_PASSWORD if password is None else password
|
||||
self.host = sickbeard.TORRENT_HOST if host is None else host
|
||||
|
||||
|
||||
self.url = None
|
||||
self.response = None
|
||||
self.auth = None
|
||||
self.last_time = time.time()
|
||||
self.session = requests.session()
|
||||
self.session.auth = (self.username, self.password)
|
||||
|
||||
|
||||
def _request(self, method='get', params={}, data=None, files=None):
|
||||
|
||||
if time.time() > self.last_time + 1800 or not self.auth:
|
||||
self.last_time = time.time()
|
||||
self._get_auth()
|
||||
|
||||
logger.log(self.name + u': Requested a ' + method.upper() + ' connection to url '+ self.url + ' with Params= ' + str(params) + ' Data=' + str(data if data else 'None')[0:99] + ('...' if len(data if data else 'None') > 200 else ''), logger.DEBUG)
|
||||
|
||||
|
||||
logger.log(
|
||||
self.name + u': Requested a ' + method.upper() + ' connection to url ' + self.url + ' with Params= ' + str(
|
||||
params) + ' Data=' + str(data if data else 'None')[0:99] + (
|
||||
'...' if len(data if data else 'None') > 200 else ''), logger.DEBUG)
|
||||
|
||||
if not self.auth:
|
||||
logger.log(self.name + u': Authentication Failed' , logger.ERROR)
|
||||
logger.log(self.name + u': Authentication Failed', logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
try:
|
||||
self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files, timeout=10, verify=False)
|
||||
self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
|
||||
timeout=10, verify=False)
|
||||
except requests.exceptions.ConnectionError, e:
|
||||
logger.log(self.name + u': Unable to connect ' + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
@ -53,19 +57,20 @@ class GenericClient(object):
|
|||
logger.log(self.name + u': Connection Timeout ' + ex(e), logger.ERROR)
|
||||
return False
|
||||
except Exception, e:
|
||||
logger.log(self.name + u': Unknown exception raised when send torrent to ' + self.name + ': ' + ex(e), logger.ERROR)
|
||||
logger.log(self.name + u': Unknown exception raised when send torrent to ' + self.name + ': ' + ex(e),
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
if self.response.status_code == 401:
|
||||
logger.log(self.name + u': Invalid Username or Password, check your config', logger.ERROR)
|
||||
logger.log(self.name + u': Invalid Username or Password, check your config', logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
if self.response.status_code in http_error_code.keys():
|
||||
logger.log(self.name + u': ' + http_error_code[self.response.status_code], logger.DEBUG)
|
||||
return False
|
||||
|
||||
logger.log(self.name + u': Response to '+ method.upper() + ' request is ' + self.response.text, logger.DEBUG)
|
||||
|
||||
|
||||
logger.log(self.name + u': Response to ' + method.upper() + ' request is ' + self.response.text, logger.DEBUG)
|
||||
|
||||
return True
|
||||
|
||||
def _get_auth(self):
|
||||
|
@ -73,20 +78,20 @@ class GenericClient(object):
|
|||
This should be overridden and should return the auth_id needed for the client
|
||||
"""
|
||||
return None
|
||||
|
||||
|
||||
def _add_torrent_uri(self, result):
|
||||
"""
|
||||
This should be overridden should return the True/False from the client
|
||||
when a torrent is added via url (magnet or .torrent link)
|
||||
"""
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
def _add_torrent_file(self, result):
|
||||
"""
|
||||
This should be overridden should return the True/False from the client
|
||||
when a torrent is added via result.content (only .torrent file)
|
||||
"""
|
||||
return False
|
||||
return False
|
||||
|
||||
def _set_torrent_label(self, result):
|
||||
"""
|
||||
|
@ -94,7 +99,7 @@ class GenericClient(object):
|
|||
when a torrent is set with label
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
def _set_torrent_ratio(self, result):
|
||||
"""
|
||||
This should be overridden should return the True/False from the client
|
||||
|
@ -113,18 +118,18 @@ class GenericClient(object):
|
|||
"""
|
||||
This should be overridden should return the True/False from the client
|
||||
when a torrent is set with path
|
||||
"""
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
def _set_torrent_pause(self, result):
|
||||
"""
|
||||
This should be overridden should return the True/False from the client
|
||||
when a torrent is set with pause
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
def _get_torrent_hash(self, result):
|
||||
|
||||
|
||||
if result.url.startswith('magnet'):
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0]
|
||||
if len(torrent_hash) == 32:
|
||||
|
@ -134,38 +139,38 @@ class GenericClient(object):
|
|||
torrent_hash = sha1(bencode(info)).hexdigest()
|
||||
|
||||
return torrent_hash
|
||||
|
||||
|
||||
def sendTORRENT(self, result):
|
||||
|
||||
|
||||
r_code = False
|
||||
|
||||
logger.log(u'Calling ' + self.name + ' Client', logger.DEBUG)
|
||||
|
||||
if not self._get_auth():
|
||||
logger.log(self.name + u': Authentication Failed' , logger.ERROR)
|
||||
logger.log(self.name + u': Authentication Failed', logger.ERROR)
|
||||
return r_code
|
||||
|
||||
|
||||
try:
|
||||
|
||||
result.hash = self._get_torrent_hash(result)
|
||||
|
||||
|
||||
if result.url.startswith('magnet'):
|
||||
r_code = self._add_torrent_uri(result)
|
||||
else:
|
||||
r_code = self._add_torrent_file(result)
|
||||
|
||||
|
||||
if not r_code:
|
||||
return False
|
||||
|
||||
|
||||
if not self._set_torrent_pause(result):
|
||||
logger.log(self.name + u': Unable to set the pause for Torrent', logger.ERROR)
|
||||
|
||||
|
||||
if not self._set_torrent_label(result):
|
||||
logger.log(self.name + u': Unable to set the label for Torrent', logger.ERROR)
|
||||
|
||||
|
||||
if not self._set_torrent_ratio(result):
|
||||
logger.log(self.name + u': Unable to set the ratio for Torrent', logger.ERROR)
|
||||
|
||||
|
||||
if not self._set_torrent_path(result):
|
||||
logger.log(self.name + u': Unable to set the path for Torrent', logger.ERROR)
|
||||
|
||||
|
@ -176,7 +181,7 @@ class GenericClient(object):
|
|||
logger.log(self.name + u': Failed Sending Torrent ', logger.ERROR)
|
||||
logger.log(self.name + u': Exception raised when sending torrent: ' + ex(e), logger.DEBUG)
|
||||
return r_code
|
||||
|
||||
|
||||
return r_code
|
||||
|
||||
def testAuthentication(self):
|
||||
|
@ -186,16 +191,16 @@ class GenericClient(object):
|
|||
except requests.exceptions.ConnectionError, e:
|
||||
return False, 'Error: ' + self.name + ' Connection Error'
|
||||
except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
|
||||
return False,'Error: Invalid ' + self.name + ' host'
|
||||
return False, 'Error: Invalid ' + self.name + ' host'
|
||||
|
||||
if self.response.status_code == 401:
|
||||
return False, 'Error: Invalid ' + self.name + ' Username or Password, check your config!'
|
||||
|
||||
try:
|
||||
self._get_auth()
|
||||
if self.response.status_code == 200 and self.auth:
|
||||
return True, 'Success: Connected and Authenticated'
|
||||
else:
|
||||
return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!'
|
||||
except Exception:
|
||||
return False, 'Error: Unable to connect to '+ self.name
|
||||
if self.response.status_code == 401:
|
||||
return False, 'Error: Invalid ' + self.name + ' Username or Password, check your config!'
|
||||
|
||||
try:
|
||||
self._get_auth()
|
||||
if self.response.status_code == 200 and self.auth:
|
||||
return True, 'Success: Connected and Authenticated'
|
||||
else:
|
||||
return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!'
|
||||
except Exception:
|
||||
return False, 'Error: Unable to connect to ' + self.name
|
||||
|
|
|
@ -23,8 +23,8 @@ from sickbeard.clients.generic import GenericClient
|
|||
from lib.rtorrent import RTorrent
|
||||
from lib.rtorrent.err import MethodError
|
||||
|
||||
class rTorrentAPI(GenericClient):
|
||||
|
||||
class rTorrentAPI(GenericClient):
|
||||
def __init__(self, host=None, username=None, password=None):
|
||||
super(rTorrentAPI, self).__init__('rTorrent', host, username, password)
|
||||
|
||||
|
@ -112,8 +112,8 @@ class rTorrentAPI(GenericClient):
|
|||
if not result:
|
||||
return False
|
||||
|
||||
# group_name = 'sb_test'.lower() ##### Use provider instead of _test
|
||||
# if not self._set_torrent_ratio(group_name):
|
||||
# group_name = 'sb_test'.lower() ##### Use provider instead of _test
|
||||
# if not self._set_torrent_ratio(group_name):
|
||||
# return False
|
||||
|
||||
# Send request to rTorrent
|
||||
|
@ -145,36 +145,36 @@ class rTorrentAPI(GenericClient):
|
|||
def _set_torrent_ratio(self, name):
|
||||
|
||||
# if not name:
|
||||
# return False
|
||||
# return False
|
||||
#
|
||||
# if not self.auth:
|
||||
# return False
|
||||
# return False
|
||||
#
|
||||
# views = self.auth.get_views()
|
||||
#
|
||||
# if name not in views:
|
||||
# self.auth.create_group(name)
|
||||
# self.auth.create_group(name)
|
||||
|
||||
# group = self.auth.get_group(name)
|
||||
|
||||
# ratio = int(float(sickbeard.TORRENT_RATIO) * 100)
|
||||
#
|
||||
# try:
|
||||
# if ratio > 0:
|
||||
#
|
||||
# # Explicitly set all group options to ensure it is setup correctly
|
||||
# group.set_upload('1M')
|
||||
# group.set_min(ratio)
|
||||
# group.set_max(ratio)
|
||||
# group.set_command('d.stop')
|
||||
# group.enable()
|
||||
# else:
|
||||
# # Reset group action and disable it
|
||||
# group.set_command()
|
||||
# group.disable()
|
||||
# if ratio > 0:
|
||||
#
|
||||
# # Explicitly set all group options to ensure it is setup correctly
|
||||
# group.set_upload('1M')
|
||||
# group.set_min(ratio)
|
||||
# group.set_max(ratio)
|
||||
# group.set_command('d.stop')
|
||||
# group.enable()
|
||||
# else:
|
||||
# # Reset group action and disable it
|
||||
# group.set_command()
|
||||
# group.disable()
|
||||
#
|
||||
# except:
|
||||
# return False
|
||||
# return False
|
||||
|
||||
return True
|
||||
|
||||
|
@ -187,6 +187,7 @@ class rTorrentAPI(GenericClient):
|
|||
else:
|
||||
return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!'
|
||||
except Exception:
|
||||
return False, 'Error: Unable to connect to '+ self.name
|
||||
return False, 'Error: Unable to connect to ' + self.name
|
||||
|
||||
|
||||
api = rTorrentAPI()
|
||||
|
|
|
@ -23,91 +23,91 @@ from base64 import b64encode
|
|||
import sickbeard
|
||||
from sickbeard.clients.generic import GenericClient
|
||||
|
||||
|
||||
class TransmissionAPI(GenericClient):
|
||||
|
||||
def __init__(self, host=None, username=None, password=None):
|
||||
|
||||
|
||||
super(TransmissionAPI, self).__init__('Transmission', host, username, password)
|
||||
|
||||
|
||||
self.url = self.host + 'transmission/rpc'
|
||||
|
||||
def _get_auth(self):
|
||||
|
||||
post_data = json.dumps({'method': 'session-get',})
|
||||
post_data = json.dumps({'method': 'session-get', })
|
||||
|
||||
try:
|
||||
try:
|
||||
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
|
||||
self.auth = re.search('X-Transmission-Session-Id:\s*(\w+)', self.response.text).group(1)
|
||||
except:
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
self.session.headers.update({'x-transmission-session-id': self.auth})
|
||||
|
||||
|
||||
#Validating Transmission authorization
|
||||
post_data = json.dumps({'arguments': {},
|
||||
'method': 'session-get',
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return self.auth
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return self.auth
|
||||
|
||||
def _add_torrent_uri(self, result):
|
||||
|
||||
arguments = { 'filename': result.url,
|
||||
'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
|
||||
'download-dir': sickbeard.TORRENT_PATH
|
||||
}
|
||||
post_data = json.dumps({ 'arguments': arguments,
|
||||
'method': 'torrent-add',
|
||||
})
|
||||
arguments = {'filename': result.url,
|
||||
'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
|
||||
'download-dir': sickbeard.TORRENT_PATH
|
||||
}
|
||||
post_data = json.dumps({'arguments': arguments,
|
||||
'method': 'torrent-add',
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return self.response.json()['result'] == "success"
|
||||
|
||||
def _add_torrent_file(self, result):
|
||||
|
||||
arguments = { 'metainfo': b64encode(result.content),
|
||||
'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
|
||||
'download-dir': sickbeard.TORRENT_PATH
|
||||
}
|
||||
arguments = {'metainfo': b64encode(result.content),
|
||||
'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
|
||||
'download-dir': sickbeard.TORRENT_PATH
|
||||
}
|
||||
post_data = json.dumps({'arguments': arguments,
|
||||
'method': 'torrent-add',
|
||||
})
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
|
||||
return self.response.json()['result'] == "success"
|
||||
|
||||
def _set_torrent_ratio(self, result):
|
||||
|
||||
|
||||
torrent_id = self._get_torrent_hash(result)
|
||||
|
||||
|
||||
if sickbeard.TORRENT_RATIO == '':
|
||||
# Use global settings
|
||||
ratio = None
|
||||
mode = 0
|
||||
elif float(sickbeard.TORRENT_RATIO) == 0:
|
||||
ratio = 0
|
||||
mode = 2
|
||||
mode = 2
|
||||
elif float(sickbeard.TORRENT_RATIO) > 0:
|
||||
ratio = float(sickbeard.TORRENT_RATIO)
|
||||
mode = 1 # Stop seeding at seedRatioLimit
|
||||
mode = 1 # Stop seeding at seedRatioLimit
|
||||
|
||||
arguments = { 'ids': [torrent_id],
|
||||
'seedRatioLimit': ratio,
|
||||
'seedRatioMode': mode
|
||||
}
|
||||
arguments = {'ids': [torrent_id],
|
||||
'seedRatioLimit': ratio,
|
||||
'seedRatioMode': mode
|
||||
}
|
||||
post_data = json.dumps({'arguments': arguments,
|
||||
'method': 'torrent-set',
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return self.response.json()['result'] == "success"
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return self.response.json()['result'] == "success"
|
||||
|
||||
def _set_torrent_priority(self, result):
|
||||
|
||||
torrent_id = self._get_torrent_hash(result)
|
||||
|
||||
arguments = { 'ids': [torrent_id]}
|
||||
arguments = {'ids': [torrent_id]}
|
||||
|
||||
if result.priority == -1:
|
||||
arguments['priority-low'] = []
|
||||
|
@ -120,13 +120,13 @@ class TransmissionAPI(GenericClient):
|
|||
arguments['bandwidthPriority'] = 1
|
||||
else:
|
||||
arguments['priority-normal'] = []
|
||||
|
||||
|
||||
post_data = json.dumps({'arguments': arguments,
|
||||
'method': 'torrent-set',
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return self.response.json()['result'] == "success"
|
||||
|
||||
})
|
||||
self._request(method='post', data=post_data)
|
||||
|
||||
return self.response.json()['result'] == "success"
|
||||
|
||||
|
||||
api = TransmissionAPI()
|
||||
|
|
|
@ -21,56 +21,57 @@ import re
|
|||
import sickbeard
|
||||
from sickbeard.clients.generic import GenericClient
|
||||
|
||||
|
||||
class uTorrentAPI(GenericClient):
|
||||
|
||||
def __init__(self, host=None, username=None, password=None):
|
||||
|
||||
|
||||
super(uTorrentAPI, self).__init__('uTorrent', host, username, password)
|
||||
|
||||
|
||||
self.url = self.host + 'gui/'
|
||||
|
||||
|
||||
def _request(self, method='get', params={}, files=None):
|
||||
|
||||
params.update({'token':self.auth})
|
||||
params.update({'token': self.auth})
|
||||
return super(uTorrentAPI, self)._request(method=method, params=params, files=files)
|
||||
|
||||
def _get_auth(self):
|
||||
|
||||
try:
|
||||
try:
|
||||
self.response = self.session.get(self.url + 'token.html')
|
||||
self.auth = re.findall("<div.*?>(.*?)</", self.response.text)[0]
|
||||
except:
|
||||
except:
|
||||
return None
|
||||
|
||||
|
||||
return self.auth if not self.response.status_code == 404 else None
|
||||
|
||||
|
||||
def _add_torrent_uri(self, result):
|
||||
|
||||
params={'action':'add-url', 's': result.url}
|
||||
params = {'action': 'add-url', 's': result.url}
|
||||
return self._request(params=params)
|
||||
|
||||
def _add_torrent_file(self, result):
|
||||
|
||||
params = {'action':'add-file'}
|
||||
files={'torrent_file': (result.name + '.torrent', result.content)}
|
||||
return self._request(method='post', params=params, files=files)
|
||||
params = {'action': 'add-file'}
|
||||
files = {'torrent_file': (result.name + '.torrent', result.content)}
|
||||
return self._request(method='post', params=params, files=files)
|
||||
|
||||
def _set_torrent_label(self, result):
|
||||
|
||||
params = {'action':'setprops',
|
||||
'hash':result.hash,
|
||||
's':'label',
|
||||
'v':sickbeard.TORRENT_LABEL
|
||||
}
|
||||
|
||||
params = {'action': 'setprops',
|
||||
'hash': result.hash,
|
||||
's': 'label',
|
||||
'v': sickbeard.TORRENT_LABEL
|
||||
}
|
||||
return self._request(params=params)
|
||||
|
||||
|
||||
def _set_torrent_pause(self, result):
|
||||
|
||||
if sickbeard.TORRENT_PAUSED:
|
||||
params = {'action':'pause', 'hash':result.hash}
|
||||
params = {'action': 'pause', 'hash': result.hash}
|
||||
else:
|
||||
params = {'action':'start', 'hash':result.hash}
|
||||
|
||||
params = {'action': 'start', 'hash': result.hash}
|
||||
|
||||
return self._request(params=params)
|
||||
|
||||
|
||||
|
||||
api = uTorrentAPI()
|
|
@ -26,8 +26,8 @@ from sickbeard import version
|
|||
|
||||
INSTANCE_ID = str(uuid.uuid1())
|
||||
USER_AGENT = ('Sick Beard/' + version.SICKBEARD_VERSION.replace(' ', '-') +
|
||||
' (' + platform.system() + '; ' + platform.release() +
|
||||
'; ' + INSTANCE_ID + ')')
|
||||
' (' + platform.system() + '; ' + platform.release() +
|
||||
'; ' + INSTANCE_ID + ')')
|
||||
|
||||
mediaExtensions = ['avi', 'mkv', 'mpg', 'mpeg', 'wmv',
|
||||
'ogm', 'mp4', 'iso', 'img', 'divx',
|
||||
|
@ -52,18 +52,18 @@ notifyStrings[NOTIFY_DOWNLOAD] = "Download Finished"
|
|||
notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD] = "Subtitle Download Finished"
|
||||
|
||||
### Episode statuses
|
||||
UNKNOWN = -1 # should never happen
|
||||
UNAIRED = 1 # episodes that haven't aired yet
|
||||
SNATCHED = 2 # qualified with quality
|
||||
WANTED = 3 # episodes we don't have but want to get
|
||||
DOWNLOADED = 4 # qualified with quality
|
||||
SKIPPED = 5 # episodes we don't want
|
||||
ARCHIVED = 6 # episodes that you don't have locally (counts toward download completion stats)
|
||||
IGNORED = 7 # episodes that you don't want included in your download stats
|
||||
SNATCHED_PROPER = 9 # qualified with quality
|
||||
SUBTITLED = 10 # qualified with quality
|
||||
FAILED = 11 #episode downloaded or snatched we don't want
|
||||
SNATCHED_BEST = 12 # episode redownloaded using best quality
|
||||
UNKNOWN = -1 # should never happen
|
||||
UNAIRED = 1 # episodes that haven't aired yet
|
||||
SNATCHED = 2 # qualified with quality
|
||||
WANTED = 3 # episodes we don't have but want to get
|
||||
DOWNLOADED = 4 # qualified with quality
|
||||
SKIPPED = 5 # episodes we don't want
|
||||
ARCHIVED = 6 # episodes that you don't have locally (counts toward download completion stats)
|
||||
IGNORED = 7 # episodes that you don't want included in your download stats
|
||||
SNATCHED_PROPER = 9 # qualified with quality
|
||||
SUBTITLED = 10 # qualified with quality
|
||||
FAILED = 11 #episode downloaded or snatched we don't want
|
||||
SNATCHED_BEST = 12 # episode redownloaded using best quality
|
||||
|
||||
NAMING_REPEAT = 1
|
||||
NAMING_EXTEND = 2
|
||||
|
@ -80,28 +80,21 @@ multiEpStrings[NAMING_EXTEND] = "Extend"
|
|||
multiEpStrings[NAMING_LIMITED_EXTEND] = "Extend (Limited)"
|
||||
multiEpStrings[NAMING_LIMITED_EXTEND_E_PREFIXED] = "Extend (Limited, E-prefixed)"
|
||||
|
||||
### Notification Types
|
||||
INDEXER_TVDB = "Tvdb"
|
||||
INDEXER_TVRAGE = "TVRage"
|
||||
|
||||
indexerStrings = {}
|
||||
indexerStrings[INDEXER_TVDB] = "TheTVDB"
|
||||
indexerStrings[INDEXER_TVRAGE] = "TVRage"
|
||||
|
||||
class Quality:
|
||||
NONE = 0 # 0
|
||||
SDTV = 1 # 1
|
||||
SDDVD = 1 << 1 # 2
|
||||
HDTV = 1 << 2 # 4
|
||||
RAWHDTV = 1 << 3 # 8 -- 720p/1080i mpeg2 (trollhd releases)
|
||||
FULLHDTV = 1 << 4 # 16 -- 1080p HDTV (QCF releases)
|
||||
HDWEBDL = 1 << 5 # 32
|
||||
NONE = 0 # 0
|
||||
SDTV = 1 # 1
|
||||
SDDVD = 1 << 1 # 2
|
||||
HDTV = 1 << 2 # 4
|
||||
RAWHDTV = 1 << 3 # 8 -- 720p/1080i mpeg2 (trollhd releases)
|
||||
FULLHDTV = 1 << 4 # 16 -- 1080p HDTV (QCF releases)
|
||||
HDWEBDL = 1 << 5 # 32
|
||||
FULLHDWEBDL = 1 << 6 # 64 -- 1080p web-dl
|
||||
HDBLURAY = 1 << 7 # 128
|
||||
FULLHDBLURAY = 1 << 8 # 256
|
||||
HDBLURAY = 1 << 7 # 128
|
||||
FULLHDBLURAY = 1 << 8 # 256
|
||||
|
||||
# put these bits at the other end of the spectrum, far enough out that they shouldn't interfere
|
||||
UNKNOWN = 1 << 15 # 32768
|
||||
UNKNOWN = 1 << 15 # 32768
|
||||
|
||||
qualityStrings = {NONE: "N/A",
|
||||
UNKNOWN: "Unknown",
|
||||
|
@ -125,7 +118,8 @@ class Quality:
|
|||
def _getStatusStrings(status):
|
||||
toReturn = {}
|
||||
for x in Quality.qualityStrings.keys():
|
||||
toReturn[Quality.compositeStatus(status, x)] = Quality.statusPrefixes[status] + " (" + Quality.qualityStrings[x] + ")"
|
||||
toReturn[Quality.compositeStatus(status, x)] = Quality.statusPrefixes[status] + " (" + \
|
||||
Quality.qualityStrings[x] + ")"
|
||||
return toReturn
|
||||
|
||||
@staticmethod
|
||||
|
@ -156,21 +150,21 @@ class Quality:
|
|||
Return The quality from an episode File renamed by Sickbeard
|
||||
If no quality is achieved it will try sceneQuality regex
|
||||
"""
|
||||
|
||||
|
||||
name = os.path.basename(name)
|
||||
|
||||
# if we have our exact text then assume we put it there
|
||||
for x in sorted(Quality.qualityStrings.keys(), reverse=True):
|
||||
if x == Quality.UNKNOWN:
|
||||
continue
|
||||
if x == Quality.NONE: #Last chance
|
||||
if x == Quality.NONE: #Last chance
|
||||
return Quality.sceneQuality(name)
|
||||
|
||||
regex = '\W' + Quality.qualityStrings[x].replace(' ','\W') + '\W'
|
||||
|
||||
regex = '\W' + Quality.qualityStrings[x].replace(' ', '\W') + '\W'
|
||||
regex_match = re.search(regex, name, re.I)
|
||||
if regex_match:
|
||||
return x
|
||||
|
||||
|
||||
@staticmethod
|
||||
def sceneQuality(name):
|
||||
"""
|
||||
|
@ -187,22 +181,23 @@ class Quality:
|
|||
return Quality.SDTV
|
||||
elif checkName(["(dvdrip|b[r|d]rip)(.ws)?.(xvid|divx|x264)"], any) and not checkName(["(720|1080)[pi]"], all):
|
||||
return Quality.SDDVD
|
||||
elif checkName(["720p", "hdtv", "x264"], all) or checkName(["hr.ws.pdtv.x264"], any) and not checkName(["(1080)[pi]"], all):
|
||||
return Quality.HDTV
|
||||
elif checkName(["720p", "hdtv", "x264"], all) or checkName(["hr.ws.pdtv.x264"], any) and not checkName(
|
||||
["(1080)[pi]"], all):
|
||||
return Quality.HDTV
|
||||
elif checkName(["720p|1080i", "hdtv", "mpeg-?2"], all) or checkName(["1080i.hdtv", "h.?264"], all):
|
||||
return Quality.RAWHDTV
|
||||
elif checkName(["1080p", "hdtv", "x264"], all):
|
||||
return Quality.FULLHDTV
|
||||
return Quality.RAWHDTV
|
||||
elif checkName(["1080p", "hdtv", "x264"], all):
|
||||
return Quality.FULLHDTV
|
||||
elif checkName(["720p", "web.dl", "h.?264"], all) or checkName(["720p", "itunes", "h.?264"], all):
|
||||
return Quality.HDWEBDL
|
||||
elif checkName(["1080p", "web.dl", "h.?264"], all) or checkName(["1080p", "itunes", "h.?264"], all):
|
||||
return Quality.FULLHDWEBDL
|
||||
return Quality.HDWEBDL
|
||||
elif checkName(["1080p", "web.dl", "h.?264"], all) or checkName(["1080p", "itunes", "h.?264"], all):
|
||||
return Quality.FULLHDWEBDL
|
||||
elif checkName(["720p", "webrip", "x264"], all):
|
||||
return Quality.HDWEBDL
|
||||
return Quality.HDWEBDL
|
||||
elif checkName(["1080p", "webrip", "x264"], all):
|
||||
return Quality.FULLHDWEBDL
|
||||
return Quality.FULLHDWEBDL
|
||||
elif checkName(["720p", "bluray|hddvd|b[r|d]rip", "x264"], all):
|
||||
return Quality.HDBLURAY
|
||||
return Quality.HDBLURAY
|
||||
elif checkName(["1080p", "bluray|hddvd|b[r|d]rip", "x264"], all):
|
||||
return Quality.FULLHDBLURAY
|
||||
else:
|
||||
|
@ -212,8 +207,8 @@ class Quality:
|
|||
def assumeQuality(name):
|
||||
if name.lower().endswith((".avi", ".mp4")):
|
||||
return Quality.SDTV
|
||||
# elif name.lower().endswith(".mkv"):
|
||||
# return Quality.HDTV
|
||||
# elif name.lower().endswith(".mkv"):
|
||||
# return Quality.HDTV
|
||||
elif name.lower().endswith(".ts"):
|
||||
return Quality.RAWHDTV
|
||||
else:
|
||||
|
@ -232,7 +227,7 @@ class Quality:
|
|||
"""Returns a tuple containing (status, quality)"""
|
||||
if status == UNKNOWN:
|
||||
return (UNKNOWN, Quality.UNKNOWN)
|
||||
|
||||
|
||||
for x in sorted(Quality.qualityStrings.keys(), reverse=True):
|
||||
if status > x * 100:
|
||||
return (status - x * 100, x)
|
||||
|
@ -252,18 +247,23 @@ class Quality:
|
|||
FAILED = None
|
||||
SNATCHED_BEST = None
|
||||
|
||||
|
||||
Quality.DOWNLOADED = [Quality.compositeStatus(DOWNLOADED, x) for x in Quality.qualityStrings.keys()]
|
||||
Quality.SNATCHED = [Quality.compositeStatus(SNATCHED, x) for x in Quality.qualityStrings.keys()]
|
||||
Quality.SNATCHED_PROPER = [Quality.compositeStatus(SNATCHED_PROPER, x) for x in Quality.qualityStrings.keys()]
|
||||
Quality.FAILED = [Quality.compositeStatus(FAILED, x) for x in Quality.qualityStrings.keys()]
|
||||
Quality.SNATCHED_BEST = [Quality.compositeStatus(SNATCHED_BEST, x) for x in Quality.qualityStrings.keys()]
|
||||
|
||||
SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], [])
|
||||
HD = Quality.combineQualities([Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY], []) # HD720p + HD1080p
|
||||
HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
|
||||
HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
|
||||
ANY = Quality.combineQualities([Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD
|
||||
|
||||
SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], [])
|
||||
HD = Quality.combineQualities(
|
||||
[Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY],
|
||||
[]) # HD720p + HD1080p
|
||||
HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
|
||||
HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
|
||||
ANY = Quality.combineQualities(
|
||||
[Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
|
||||
Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD
|
||||
|
||||
# legacy template, cant remove due to reference in mainDB upgrade?
|
||||
BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
|
||||
|
||||
|
@ -274,6 +274,7 @@ qualityPresetStrings = {SD: "SD",
|
|||
HD1080p: "HD1080p",
|
||||
ANY: "Any"}
|
||||
|
||||
|
||||
class StatusStrings:
|
||||
def __init__(self):
|
||||
self.statusStrings = {UNKNOWN: "Unknown",
|
||||
|
@ -302,18 +303,20 @@ class StatusStrings:
|
|||
def has_key(self, name):
|
||||
return name in self.statusStrings or name in Quality.DOWNLOADED or name in Quality.SNATCHED or name in Quality.SNATCHED_PROPER or name in Quality.SNATCHED_BEST
|
||||
|
||||
|
||||
statusStrings = StatusStrings()
|
||||
|
||||
|
||||
class Overview:
|
||||
UNAIRED = UNAIRED # 1
|
||||
UNAIRED = UNAIRED # 1
|
||||
QUAL = 2
|
||||
WANTED = WANTED # 3
|
||||
WANTED = WANTED # 3
|
||||
GOOD = 4
|
||||
SKIPPED = SKIPPED # 5
|
||||
|
||||
SKIPPED = SKIPPED # 5
|
||||
|
||||
# For both snatched statuses. Note: SNATCHED/QUAL have same value and break dict.
|
||||
SNATCHED = SNATCHED_PROPER = SNATCHED_BEST # 9
|
||||
|
||||
|
||||
overviewStrings = {SKIPPED: "skipped",
|
||||
WANTED: "wanted",
|
||||
QUAL: "qual",
|
||||
|
@ -325,9 +328,8 @@ class Overview:
|
|||
XML_NSMAP = {'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
|
||||
'xsd': 'http://www.w3.org/2001/XMLSchema'}
|
||||
|
||||
|
||||
countryList = {'Australia': 'AU',
|
||||
'Canada': 'CA',
|
||||
'USA': 'US'
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,9 +29,9 @@ from sickbeard import db
|
|||
import sickbeard
|
||||
|
||||
naming_ep_type = ("%(seasonnumber)dx%(episodenumber)02d",
|
||||
"s%(seasonnumber)02de%(episodenumber)02d",
|
||||
"S%(seasonnumber)02dE%(episodenumber)02d",
|
||||
"%(seasonnumber)02dx%(episodenumber)02d")
|
||||
"s%(seasonnumber)02de%(episodenumber)02d",
|
||||
"S%(seasonnumber)02dE%(episodenumber)02d",
|
||||
"%(seasonnumber)02dx%(episodenumber)02d")
|
||||
naming_ep_type_text = ("1x02", "s01e02", "S01E02", "01x02")
|
||||
|
||||
naming_multi_ep_type = {0: ["-%(episodenumber)02d"] * len(naming_ep_type),
|
||||
|
@ -44,7 +44,6 @@ naming_sep_type_text = (" - ", "space")
|
|||
|
||||
|
||||
def change_HTTPS_CERT(https_cert):
|
||||
|
||||
if https_cert == '':
|
||||
sickbeard.HTTPS_CERT = ''
|
||||
return True
|
||||
|
@ -60,7 +59,6 @@ def change_HTTPS_CERT(https_cert):
|
|||
|
||||
|
||||
def change_HTTPS_KEY(https_key):
|
||||
|
||||
if https_key == '':
|
||||
sickbeard.HTTPS_KEY = ''
|
||||
return True
|
||||
|
@ -76,7 +74,6 @@ def change_HTTPS_KEY(https_key):
|
|||
|
||||
|
||||
def change_LOG_DIR(log_dir, web_log):
|
||||
|
||||
log_dir_changed = False
|
||||
abs_log_dir = os.path.normpath(os.path.join(sickbeard.DATA_DIR, log_dir))
|
||||
web_log_value = checkbox_to_value(web_log)
|
||||
|
@ -110,7 +107,6 @@ def change_LOG_DIR(log_dir, web_log):
|
|||
|
||||
|
||||
def change_NZB_DIR(nzb_dir):
|
||||
|
||||
if nzb_dir == '':
|
||||
sickbeard.NZB_DIR = ''
|
||||
return True
|
||||
|
@ -126,7 +122,6 @@ def change_NZB_DIR(nzb_dir):
|
|||
|
||||
|
||||
def change_TORRENT_DIR(torrent_dir):
|
||||
|
||||
if torrent_dir == '':
|
||||
sickbeard.TORRENT_DIR = ''
|
||||
return True
|
||||
|
@ -142,7 +137,6 @@ def change_TORRENT_DIR(torrent_dir):
|
|||
|
||||
|
||||
def change_TV_DOWNLOAD_DIR(tv_download_dir):
|
||||
|
||||
if tv_download_dir == '':
|
||||
sickbeard.TV_DOWNLOAD_DIR = ''
|
||||
return True
|
||||
|
@ -158,7 +152,6 @@ def change_TV_DOWNLOAD_DIR(tv_download_dir):
|
|||
|
||||
|
||||
def change_SEARCH_FREQUENCY(freq):
|
||||
|
||||
sickbeard.SEARCH_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_SEARCH_FREQUENCY)
|
||||
|
||||
if sickbeard.SEARCH_FREQUENCY < sickbeard.MIN_SEARCH_FREQUENCY:
|
||||
|
@ -169,7 +162,6 @@ def change_SEARCH_FREQUENCY(freq):
|
|||
|
||||
|
||||
def change_VERSION_NOTIFY(version_notify):
|
||||
|
||||
oldSetting = sickbeard.VERSION_NOTIFY
|
||||
|
||||
sickbeard.VERSION_NOTIFY = version_notify
|
||||
|
@ -235,7 +227,6 @@ def clean_host(host, default_port=None):
|
|||
|
||||
|
||||
def clean_hosts(hosts, default_port=None):
|
||||
|
||||
cleaned_hosts = []
|
||||
|
||||
for cur_host in [x.strip() for x in hosts.split(",")]:
|
||||
|
@ -344,7 +335,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
|
|||
encryption_version = sickbeard.ENCRYPTION_VERSION
|
||||
else:
|
||||
encryption_version = 0
|
||||
|
||||
|
||||
try:
|
||||
my_val = helpers.decrypt(config[cfg_name][item_name], encryption_version)
|
||||
except:
|
||||
|
@ -363,7 +354,6 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
|
|||
|
||||
|
||||
class ConfigMigrator():
|
||||
|
||||
def __init__(self, config_obj):
|
||||
"""
|
||||
Initializes a config migrator that can take the config from the version indicated in the config
|
||||
|
@ -380,7 +370,7 @@ class ConfigMigrator():
|
|||
3: 'Rename omgwtfnzb variables',
|
||||
4: 'Add newznab catIDs',
|
||||
5: 'Metadata update'
|
||||
}
|
||||
}
|
||||
|
||||
def migrate_config(self):
|
||||
"""
|
||||
|
@ -388,7 +378,9 @@ class ConfigMigrator():
|
|||
"""
|
||||
|
||||
if self.config_version > self.expected_config_version:
|
||||
logger.log_error_and_exit(u"Your config version (" + str(self.config_version) + ") has been incremented past what this version of Sick Beard supports (" + str(self.expected_config_version) + ").\n" + \
|
||||
logger.log_error_and_exit(u"Your config version (" + str(
|
||||
self.config_version) + ") has been incremented past what this version of Sick Beard supports (" + str(
|
||||
self.expected_config_version) + ").\n" + \
|
||||
"If you have used other forks or a newer version of Sick Beard, your config file may be unusable due to their modifications.")
|
||||
|
||||
sickbeard.CONFIG_VERSION = self.config_version
|
||||
|
@ -452,7 +444,8 @@ class ConfigMigrator():
|
|||
new_season_format = new_season_format.replace('09', '%0S')
|
||||
new_season_format = new_season_format.replace('9', '%S')
|
||||
|
||||
logger.log(u"Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config")
|
||||
logger.log(
|
||||
u"Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config")
|
||||
sickbeard.NAMING_PATTERN = new_season_format + os.sep + sickbeard.NAMING_PATTERN
|
||||
|
||||
except (TypeError, ValueError):
|
||||
|
@ -552,7 +545,8 @@ class ConfigMigrator():
|
|||
try:
|
||||
name, url, key, enabled = cur_provider_data.split("|")
|
||||
except ValueError:
|
||||
logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format", logger.ERROR)
|
||||
logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format",
|
||||
logger.ERROR)
|
||||
continue
|
||||
|
||||
if name == 'Sick Beard Index':
|
||||
|
@ -623,7 +617,8 @@ class ConfigMigrator():
|
|||
logger.log(u"Upgrading " + metadata_name + " metadata, new value: " + metadata)
|
||||
|
||||
else:
|
||||
logger.log(u"Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format", logger.ERROR)
|
||||
logger.log(u"Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format",
|
||||
logger.ERROR)
|
||||
metadata = '0|0|0|0|0|0|0|0|0|0'
|
||||
logger.log(u"Setting " + metadata_name + " metadata, new value: " + metadata)
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
from sickbeard import db
|
||||
|
||||
# Add new migrations at the bottom of the list; subclass the previous migration.
|
||||
class InitialSchema (db.SchemaUpgrade):
|
||||
class InitialSchema(db.SchemaUpgrade):
|
||||
def test(self):
|
||||
return self.hasTable("lastUpdate")
|
||||
|
||||
|
@ -36,12 +36,15 @@ class InitialSchema (db.SchemaUpgrade):
|
|||
else:
|
||||
self.connection.action(query[0], query[1:])
|
||||
|
||||
|
||||
class AddSceneExceptions(InitialSchema):
|
||||
def test(self):
|
||||
return self.hasTable("scene_exceptions")
|
||||
|
||||
def execute(self):
|
||||
self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)")
|
||||
self.connection.action(
|
||||
"CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)")
|
||||
|
||||
|
||||
class AddSceneNameCache(AddSceneExceptions):
|
||||
def test(self):
|
||||
|
@ -50,6 +53,7 @@ class AddSceneNameCache(AddSceneExceptions):
|
|||
def execute(self):
|
||||
self.connection.action("CREATE TABLE scene_names (tvdb_id INTEGER, name TEXT)")
|
||||
|
||||
|
||||
class AddNetworkTimezones(AddSceneNameCache):
|
||||
def test(self):
|
||||
return self.hasTable("network_timezones")
|
||||
|
@ -57,19 +61,24 @@ class AddNetworkTimezones(AddSceneNameCache):
|
|||
def execute(self):
|
||||
self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)")
|
||||
|
||||
|
||||
class AddXemNumbering(AddNetworkTimezones):
|
||||
def test(self):
|
||||
return self.hasTable("xem_numbering")
|
||||
|
||||
def execute(self):
|
||||
self.connection.action("CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
|
||||
self.connection.action(
|
||||
"CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
|
||||
|
||||
|
||||
class AddXemRefresh(AddXemNumbering):
|
||||
def test(self):
|
||||
return self.hasTable("xem_refresh")
|
||||
|
||||
def execute(self):
|
||||
self.connection.action("CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
|
||||
self.connection.action(
|
||||
"CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
|
||||
|
||||
|
||||
class ConvertSceneExceptionsToIndexerID(AddXemRefresh):
|
||||
def test(self):
|
||||
|
@ -77,10 +86,13 @@ class ConvertSceneExceptionsToIndexerID(AddXemRefresh):
|
|||
|
||||
def execute(self):
|
||||
self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions")
|
||||
self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)")
|
||||
self.connection.action("INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions")
|
||||
self.connection.action(
|
||||
"CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)")
|
||||
self.connection.action(
|
||||
"INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions")
|
||||
self.connection.action("DROP TABLE tmp_scene_exceptions")
|
||||
|
||||
|
||||
class ConvertSceneNamesToIndexerID(ConvertSceneExceptionsToIndexerID):
|
||||
def test(self):
|
||||
return self.hasColumn("scene_names", "indexer_id")
|
||||
|
|
|
@ -49,6 +49,7 @@ class SizeAndProvider(InitialSchema):
|
|||
|
||||
class History(SizeAndProvider):
|
||||
"""Snatch history that can't be modified by the user"""
|
||||
|
||||
def test(self):
|
||||
return self.hasTable('history')
|
||||
|
||||
|
@ -59,6 +60,7 @@ class History(SizeAndProvider):
|
|||
|
||||
class HistoryStatus(History):
|
||||
"""Store episode status before snatch to revert to if necessary"""
|
||||
|
||||
def test(self):
|
||||
return self.hasColumn('history', 'old_status')
|
||||
|
||||
|
|
|
@ -26,11 +26,11 @@ from sickbeard import db, common, helpers, logger
|
|||
from sickbeard import encodingKludge as ek
|
||||
from sickbeard.name_parser.parser import NameParser, InvalidNameException
|
||||
|
||||
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
||||
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
||||
MAX_DB_VERSION = 27
|
||||
|
||||
class MainSanityCheck(db.DBSanityCheck):
|
||||
|
||||
class MainSanityCheck(db.DBSanityCheck):
|
||||
def check(self):
|
||||
self.fix_duplicate_shows()
|
||||
self.fix_duplicate_episodes()
|
||||
|
@ -38,18 +38,23 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
|
||||
def fix_duplicate_shows(self):
|
||||
|
||||
sqlResults = self.connection.select("SELECT show_id, indexer_id, COUNT(indexer_id) as count FROM tv_shows GROUP BY indexer_id HAVING count > 1")
|
||||
sqlResults = self.connection.select(
|
||||
"SELECT show_id, indexer_id, COUNT(indexer_id) as count FROM tv_shows GROUP BY indexer_id HAVING count > 1")
|
||||
|
||||
for cur_duplicate in sqlResults:
|
||||
|
||||
logger.log(u"Duplicate show detected! indexer_id: " + str(cur_duplicate["indexer_id"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG)
|
||||
logger.log(u"Duplicate show detected! indexer_id: " + str(cur_duplicate["indexer_id"]) + u" count: " + str(
|
||||
cur_duplicate["count"]), logger.DEBUG)
|
||||
|
||||
cur_dupe_results = self.connection.select("SELECT show_id, indexer_id FROM tv_shows WHERE indexer_id = ? LIMIT ?",
|
||||
[cur_duplicate["indexer_id"], int(cur_duplicate["count"])-1]
|
||||
)
|
||||
cur_dupe_results = self.connection.select(
|
||||
"SELECT show_id, indexer_id FROM tv_shows WHERE indexer_id = ? LIMIT ?",
|
||||
[cur_duplicate["indexer_id"], int(cur_duplicate["count"]) - 1]
|
||||
)
|
||||
|
||||
for cur_dupe_id in cur_dupe_results:
|
||||
logger.log(u"Deleting duplicate show with indexer_id: " + str(cur_dupe_id["indexer_id"]) + u" show_id: " + str(cur_dupe_id["show_id"]))
|
||||
logger.log(
|
||||
u"Deleting duplicate show with indexer_id: " + str(cur_dupe_id["indexer_id"]) + u" show_id: " + str(
|
||||
cur_dupe_id["show_id"]))
|
||||
self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
|
||||
|
||||
else:
|
||||
|
@ -57,15 +62,20 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
|
||||
def fix_duplicate_episodes(self):
|
||||
|
||||
sqlResults = self.connection.select("SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")
|
||||
sqlResults = self.connection.select(
|
||||
"SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")
|
||||
|
||||
for cur_duplicate in sqlResults:
|
||||
|
||||
logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: "+str(cur_duplicate["season"]) + u" episode: "+str(cur_duplicate["episode"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG)
|
||||
logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: " + str(
|
||||
cur_duplicate["season"]) + u" episode: " + str(cur_duplicate["episode"]) + u" count: " + str(
|
||||
cur_duplicate["count"]), logger.DEBUG)
|
||||
|
||||
cur_dupe_results = self.connection.select("SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
|
||||
[cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], int(cur_duplicate["count"])-1]
|
||||
)
|
||||
cur_dupe_results = self.connection.select(
|
||||
"SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
|
||||
[cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"],
|
||||
int(cur_duplicate["count"]) - 1]
|
||||
)
|
||||
|
||||
for cur_dupe_id in cur_dupe_results:
|
||||
logger.log(u"Deleting duplicate episode with episode_id: " + str(cur_dupe_id["episode_id"]))
|
||||
|
@ -76,16 +86,19 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
|
||||
def fix_orphan_episodes(self):
|
||||
|
||||
sqlResults = self.connection.select("SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL")
|
||||
sqlResults = self.connection.select(
|
||||
"SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL")
|
||||
|
||||
for cur_orphan in sqlResults:
|
||||
logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(cur_orphan["showid"]), logger.DEBUG)
|
||||
logger.log(u"Deleting orphan episode with episode_id: "+str(cur_orphan["episode_id"]))
|
||||
logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(
|
||||
cur_orphan["showid"]), logger.DEBUG)
|
||||
logger.log(u"Deleting orphan episode with episode_id: " + str(cur_orphan["episode_id"]))
|
||||
self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
|
||||
|
||||
else:
|
||||
logger.log(u"No orphan episodes, check passed")
|
||||
|
||||
|
||||
def backupDatabase(version):
|
||||
logger.log(u"Backing up database before upgrade")
|
||||
if not helpers.backupVersionedFile(db.dbFilename(), version):
|
||||
|
@ -93,12 +106,13 @@ def backupDatabase(version):
|
|||
else:
|
||||
logger.log(u"Proceeding with upgrade")
|
||||
|
||||
|
||||
# ======================
|
||||
# = Main DB Migrations =
|
||||
# ======================
|
||||
# Add new migrations at the bottom of the list; subclass the previous migration.
|
||||
|
||||
class InitialSchema (db.SchemaUpgrade):
|
||||
class InitialSchema(db.SchemaUpgrade):
|
||||
def test(self):
|
||||
return self.hasTable("db_version")
|
||||
|
||||
|
@ -115,7 +129,7 @@ class InitialSchema (db.SchemaUpgrade):
|
|||
"CREATE INDEX idx_showid ON tv_episodes (showid);",
|
||||
"CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);",
|
||||
"INSERT INTO db_version (db_version) VALUES (18);"
|
||||
]
|
||||
]
|
||||
for query in queries:
|
||||
self.connection.action(query)
|
||||
|
||||
|
@ -123,20 +137,21 @@ class InitialSchema (db.SchemaUpgrade):
|
|||
cur_db_version = self.checkDBVersion()
|
||||
|
||||
if cur_db_version < MIN_DB_VERSION:
|
||||
logger.log_error_and_exit(u"Your database version (" + str(cur_db_version) + ") is too old to migrate from what this version of Sick Beard supports (" + \
|
||||
logger.log_error_and_exit(u"Your database version (" + str(
|
||||
cur_db_version) + ") is too old to migrate from what this version of Sick Beard supports (" + \
|
||||
str(MIN_DB_VERSION) + ").\n" + \
|
||||
"Upgrade using a previous version (tag) build 496 to build 501 of Sick Beard first or remove database file to begin fresh."
|
||||
)
|
||||
)
|
||||
|
||||
if cur_db_version > MAX_DB_VERSION:
|
||||
logger.log_error_and_exit(u"Your database version (" + str(cur_db_version) + ") has been incremented past what this version of Sick Beard supports (" + \
|
||||
logger.log_error_and_exit(u"Your database version (" + str(
|
||||
cur_db_version) + ") has been incremented past what this version of Sick Beard supports (" + \
|
||||
str(MAX_DB_VERSION) + ").\n" + \
|
||||
"If you have used other forks of Sick Beard, your database may be unusable due to their modifications."
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class AddSizeAndSceneNameFields(InitialSchema):
|
||||
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 10
|
||||
|
||||
|
@ -160,7 +175,8 @@ class AddSizeAndSceneNameFields(InitialSchema):
|
|||
# if there is no size yet then populate it for us
|
||||
if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]):
|
||||
cur_size = ek.ek(os.path.getsize, cur_ep["location"])
|
||||
self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?", [cur_size, int(cur_ep["episode_id"])])
|
||||
self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?",
|
||||
[cur_size, int(cur_ep["episode_id"])])
|
||||
|
||||
# check each snatch to see if we can use it to get a release name from
|
||||
history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")
|
||||
|
@ -168,10 +184,12 @@ class AddSizeAndSceneNameFields(InitialSchema):
|
|||
logger.log(u"Adding release name to all episodes still in history")
|
||||
for cur_result in history_results:
|
||||
# find the associated download, if there isn't one then ignore it
|
||||
download_results = self.connection.select("SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
|
||||
[cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]])
|
||||
download_results = self.connection.select(
|
||||
"SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
|
||||
[cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]])
|
||||
if not download_results:
|
||||
logger.log(u"Found a snatch in the history for "+cur_result["resource"]+" but couldn't find the associated download, skipping it", logger.DEBUG)
|
||||
logger.log(u"Found a snatch in the history for " + cur_result[
|
||||
"resource"] + " but couldn't find the associated download, skipping it", logger.DEBUG)
|
||||
continue
|
||||
|
||||
nzb_name = cur_result["resource"]
|
||||
|
@ -182,10 +200,13 @@ class AddSizeAndSceneNameFields(InitialSchema):
|
|||
file_name = file_name.rpartition('.')[0]
|
||||
|
||||
# find the associated episode on disk
|
||||
ep_results = self.connection.select("SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
|
||||
[cur_result["showid"], cur_result["season"], cur_result["episode"]])
|
||||
ep_results = self.connection.select(
|
||||
"SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
|
||||
[cur_result["showid"], cur_result["season"], cur_result["episode"]])
|
||||
if not ep_results:
|
||||
logger.log(u"The episode "+nzb_name+" was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG)
|
||||
logger.log(
|
||||
u"The episode " + nzb_name + " was found in history but doesn't exist on disk anymore, skipping",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
# get the status/quality of the existing ep and make sure it's what we expect
|
||||
|
@ -198,7 +219,7 @@ class AddSizeAndSceneNameFields(InitialSchema):
|
|||
|
||||
# make sure this is actually a real release name and not a season pack or something
|
||||
for cur_name in (nzb_name, file_name):
|
||||
logger.log(u"Checking if "+cur_name+" is actually a good release name", logger.DEBUG)
|
||||
logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
|
||||
try:
|
||||
np = NameParser(False)
|
||||
parse_result = np.parse(cur_name)
|
||||
|
@ -207,7 +228,8 @@ class AddSizeAndSceneNameFields(InitialSchema):
|
|||
|
||||
if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
|
||||
# if all is well by this point we'll just put the release name into the database
|
||||
self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [cur_name, ep_results[0]["episode_id"]])
|
||||
self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
|
||||
[cur_name, ep_results[0]["episode_id"]])
|
||||
break
|
||||
|
||||
# check each snatch to see if we can use it to get a release name from
|
||||
|
@ -232,21 +254,24 @@ class AddSizeAndSceneNameFields(InitialSchema):
|
|||
if not parse_result.release_group:
|
||||
continue
|
||||
|
||||
logger.log(u"Name "+ep_file_name+" gave release group of "+parse_result.release_group+", seems valid", logger.DEBUG)
|
||||
self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [ep_file_name, cur_result["episode_id"]])
|
||||
logger.log(
|
||||
u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid",
|
||||
logger.DEBUG)
|
||||
self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
|
||||
[ep_file_name, cur_result["episode_id"]])
|
||||
|
||||
self.incDBVersion()
|
||||
|
||||
class RenameSeasonFolders(AddSizeAndSceneNameFields):
|
||||
|
||||
class RenameSeasonFolders(AddSizeAndSceneNameFields):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 11
|
||||
|
||||
def execute(self):
|
||||
|
||||
# rename the column
|
||||
self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
|
||||
self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)")
|
||||
self.connection.action(
|
||||
"CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)")
|
||||
sql = "INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows"
|
||||
self.connection.action(sql)
|
||||
|
||||
|
@ -258,27 +283,29 @@ class RenameSeasonFolders(AddSizeAndSceneNameFields):
|
|||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddSubtitlesSupport(RenameSeasonFolders):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 12
|
||||
|
||||
def execute(self):
|
||||
|
||||
self.addColumn("tv_shows", "subtitles")
|
||||
self.addColumn("tv_episodes", "subtitles", "TEXT", "")
|
||||
self.addColumn("tv_episodes", "subtitles_searchcount")
|
||||
self.addColumn("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min))
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddIMDbInfo(RenameSeasonFolders):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 13
|
||||
|
||||
def execute(self):
|
||||
|
||||
self.connection.action("CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
|
||||
self.connection.action(
|
||||
"CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class Add1080pAndRawHDQualities(AddIMDbInfo):
|
||||
"""Add support for 1080p related qualities along with RawHD
|
||||
|
||||
|
@ -311,17 +338,17 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
|
|||
|
||||
result = old_quality
|
||||
# move fullhdbluray from 1<<5 to 1<<8 if set
|
||||
if(result & (1<<5)):
|
||||
result = result & ~(1<<5)
|
||||
result = result | (1<<8)
|
||||
if (result & (1 << 5)):
|
||||
result = result & ~(1 << 5)
|
||||
result = result | (1 << 8)
|
||||
# move hdbluray from 1<<4 to 1<<7 if set
|
||||
if(result & (1<<4)):
|
||||
result = result & ~(1<<4)
|
||||
result = result | (1<<7)
|
||||
if (result & (1 << 4)):
|
||||
result = result & ~(1 << 4)
|
||||
result = result | (1 << 7)
|
||||
# move hdwebdl from 1<<3 to 1<<5 if set
|
||||
if(result & (1<<3)):
|
||||
result = result & ~(1<<3)
|
||||
result = result | (1<<5)
|
||||
if (result & (1 << 3)):
|
||||
result = result & ~(1 << 3)
|
||||
result = result | (1 << 5)
|
||||
|
||||
return result
|
||||
|
||||
|
@ -350,12 +377,19 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
|
|||
sickbeard.save_config()
|
||||
|
||||
# upgrade previous HD to HD720p -- shift previous qualities to new placevalues
|
||||
old_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
|
||||
new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], [])
|
||||
old_hd = common.Quality.combineQualities(
|
||||
[common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
|
||||
new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY],
|
||||
[])
|
||||
|
||||
# update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
|
||||
old_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
|
||||
new_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], [])
|
||||
old_any = common.Quality.combineQualities(
|
||||
[common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
|
||||
common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
|
||||
new_any = common.Quality.combineQualities(
|
||||
[common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
|
||||
common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY,
|
||||
common.Quality.UNKNOWN], [])
|
||||
|
||||
# update qualities (including templates)
|
||||
logger.log(u"[1/4] Updating pre-defined templates and the quality for each show...", logger.MESSAGE)
|
||||
|
@ -376,7 +410,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
|
|||
ql = []
|
||||
episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800")
|
||||
for cur_episode in episodes:
|
||||
ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
|
||||
ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
|
||||
[self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
|
||||
self.connection.mass_action(ql)
|
||||
|
||||
# make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together
|
||||
|
@ -386,7 +421,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
|
|||
ql = []
|
||||
historyAction = self.connection.select("SELECT * FROM history WHERE action < 3276800 AND action >= 800")
|
||||
for cur_entry in historyAction:
|
||||
ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?", [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
|
||||
ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?",
|
||||
[self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
|
||||
self.connection.mass_action(ql)
|
||||
|
||||
# update previous history so it shows the correct quality
|
||||
|
@ -394,7 +430,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
|
|||
ql = []
|
||||
historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
|
||||
for cur_entry in historyQuality:
|
||||
ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?", [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
|
||||
ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?",
|
||||
[self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
|
||||
self.connection.mass_action(ql)
|
||||
|
||||
self.incDBVersion()
|
||||
|
@ -403,6 +440,7 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
|
|||
logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
|
||||
self.connection.action("VACUUM")
|
||||
|
||||
|
||||
class AddProperNamingSupport(Add1080pAndRawHDQualities):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 15
|
||||
|
@ -411,6 +449,7 @@ class AddProperNamingSupport(Add1080pAndRawHDQualities):
|
|||
self.addColumn("tv_episodes", "is_proper")
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddEmailSubscriptionTable(AddProperNamingSupport):
|
||||
def test(self):
|
||||
return self.hasColumn("tv_shows", "notify_list")
|
||||
|
@ -419,6 +458,7 @@ class AddEmailSubscriptionTable(AddProperNamingSupport):
|
|||
self.addColumn('tv_shows', 'notify_list', 'TEXT', None)
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddShowidTvdbidIndex(AddEmailSubscriptionTable):
|
||||
""" Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries """
|
||||
|
||||
|
@ -439,6 +479,7 @@ class AddShowidTvdbidIndex(AddEmailSubscriptionTable):
|
|||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddLastUpdateTVDB(AddShowidTvdbidIndex):
|
||||
""" Adding column last_update_tvdb to tv_shows for controlling nightly updates """
|
||||
|
||||
|
@ -454,6 +495,7 @@ class AddLastUpdateTVDB(AddShowidTvdbidIndex):
|
|||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddLastProperSearch(AddLastUpdateTVDB):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 19
|
||||
|
@ -467,6 +509,7 @@ class AddLastProperSearch(AddLastUpdateTVDB):
|
|||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddDvdOrderOption(AddLastProperSearch):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 20
|
||||
|
@ -478,6 +521,7 @@ class AddDvdOrderOption(AddLastProperSearch):
|
|||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddIndicesToTvEpisodes(AddDvdOrderOption):
|
||||
""" Adding indices to tv episodes """
|
||||
|
||||
|
@ -498,6 +542,7 @@ class AddIndicesToTvEpisodes(AddDvdOrderOption):
|
|||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class ConvertTVShowsToIndexerScheme(AddIndicesToTvEpisodes):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 22
|
||||
|
@ -507,14 +552,17 @@ class ConvertTVShowsToIndexerScheme(AddIndicesToTvEpisodes):
|
|||
|
||||
logger.log(u"Converting TV Shows table to Indexer Scheme...")
|
||||
self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
|
||||
self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer TEXT, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
|
||||
self.connection.action("UPDATE tv_shows SET indexer = 'Tvdb'")
|
||||
self.connection.action(
|
||||
"CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMBERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
|
||||
self.connection.action("UPDATE tv_shows SET indexer = 1")
|
||||
self.connection.action("UPDATE tv_shows SET classification = 'Scripted'")
|
||||
self.connection.action("INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
|
||||
self.connection.action(
|
||||
"INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
|
||||
self.connection.action("DROP TABLE tmp_tv_shows")
|
||||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 23
|
||||
|
@ -524,13 +572,16 @@ class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
|
|||
|
||||
logger.log(u"Converting TV Episodes table to Indexer Scheme...")
|
||||
self.connection.action("ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes")
|
||||
self.connection.action("CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer TEXT, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
|
||||
self.connection.action("UPDATE tv_episodes SET indexer = 'Tvdb'")
|
||||
self.connection.action("INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
|
||||
self.connection.action(
|
||||
"CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
|
||||
self.connection.action("UPDATE tv_episodes SET indexer = 1")
|
||||
self.connection.action(
|
||||
"INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
|
||||
self.connection.action("DROP TABLE tmp_tv_episodes")
|
||||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 24
|
||||
|
@ -540,12 +591,15 @@ class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):
|
|||
|
||||
logger.log(u"Converting IMDB Info table to Indexer Scheme...")
|
||||
self.connection.action("ALTER TABLE imdb_info RENAME TO tmp_imdb_info")
|
||||
self.connection.action("CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
|
||||
self.connection.action("INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
|
||||
self.connection.action(
|
||||
"CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
|
||||
self.connection.action(
|
||||
"INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
|
||||
self.connection.action("DROP TABLE tmp_imdb_info")
|
||||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 25
|
||||
|
@ -555,12 +609,15 @@ class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
|
|||
|
||||
logger.log(u"Converting Info table to Indexer Scheme...")
|
||||
self.connection.action("ALTER TABLE info RENAME TO tmp_info")
|
||||
self.connection.action("CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
|
||||
self.connection.action("INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
|
||||
self.connection.action(
|
||||
"CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
|
||||
self.connection.action(
|
||||
"INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
|
||||
self.connection.action("DROP TABLE tmp_info")
|
||||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 26
|
||||
|
@ -572,6 +629,7 @@ class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
|
|||
|
||||
self.incDBVersion()
|
||||
|
||||
|
||||
class AddSceneNumbering(AddArchiveFirstMatchOption):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 27
|
||||
|
@ -582,6 +640,7 @@ class AddSceneNumbering(AddArchiveFirstMatchOption):
|
|||
if self.hasTable("scene_numbering"):
|
||||
self.connection.action("DROP TABLE scene_numbering")
|
||||
|
||||
self.connection.action("CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
|
||||
self.connection.action(
|
||||
"CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
|
||||
|
||||
self.incDBVersion()
|
|
@ -32,6 +32,7 @@ from sickbeard.exceptions import ex
|
|||
|
||||
db_lock = threading.Lock()
|
||||
|
||||
|
||||
def dbFilename(filename="sickbeard.db", suffix=None):
|
||||
"""
|
||||
@param filename: The sqlite database filename to use. If not specified,
|
||||
|
@ -44,6 +45,7 @@ def dbFilename(filename="sickbeard.db", suffix=None):
|
|||
filename = "%s.%s" % (filename, suffix)
|
||||
return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)
|
||||
|
||||
|
||||
class DBConnection:
|
||||
def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):
|
||||
|
||||
|
@ -127,7 +129,7 @@ class DBConnection:
|
|||
logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
|
||||
sqlResult.append(self.connection.execute(qu[0], qu[1]))
|
||||
self.connection.commit()
|
||||
logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG)
|
||||
logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG)
|
||||
return sqlResult
|
||||
except sqlite3.OperationalError, e:
|
||||
sqlResult = []
|
||||
|
@ -198,15 +200,16 @@ class DBConnection:
|
|||
|
||||
changesBefore = self.connection.total_changes
|
||||
|
||||
genParams = lambda myDict : [x + " = ?" for x in myDict.keys()]
|
||||
genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]
|
||||
|
||||
query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(genParams(keyDict))
|
||||
query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(
|
||||
genParams(keyDict))
|
||||
|
||||
self.action(query, valueDict.values() + keyDict.values())
|
||||
|
||||
if self.connection.total_changes == changesBefore:
|
||||
query = "INSERT INTO " + tableName + " (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" + \
|
||||
" VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
|
||||
" VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
|
||||
self.action(query, valueDict.values() + keyDict.values())
|
||||
|
||||
def tableInfo(self, tableName):
|
||||
|
@ -214,7 +217,7 @@ class DBConnection:
|
|||
cursor = self.connection.execute("PRAGMA table_info(%s)" % tableName)
|
||||
columns = {}
|
||||
for column in cursor:
|
||||
columns[column['name']] = { 'type': column['type'] }
|
||||
columns[column['name']] = {'type': column['type']}
|
||||
return columns
|
||||
|
||||
# http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
|
||||
|
@ -224,9 +227,11 @@ class DBConnection:
|
|||
d[col[0]] = row[idx]
|
||||
return d
|
||||
|
||||
|
||||
def sanityCheckDatabase(connection, sanity_check):
    """Instantiate the given sanity-check class with a DB connection and run it.

    :param connection: open database connection handed to the checker
    :param sanity_check: class (or factory) taking the connection and exposing check()
    """
    checker = sanity_check(connection)
    checker.check()
|
||||
|
||||
|
||||
class DBSanityCheck(object):
|
||||
def __init__(self, connection):
|
||||
self.connection = connection
|
||||
|
@ -234,6 +239,7 @@ class DBSanityCheck(object):
|
|||
def check(self):
|
||||
pass
|
||||
|
||||
|
||||
# ===============
|
||||
# = Upgrade API =
|
||||
# ===============
|
||||
|
@ -242,9 +248,11 @@ def upgradeDatabase(connection, schema):
|
|||
logger.log(u"Checking database structure...", logger.MESSAGE)
|
||||
_processUpgrade(connection, schema)
|
||||
|
||||
|
||||
def prettyName(class_name):
    """Turn a CamelCase class name into a space-separated, human-readable string.

    Each match is one capital letter followed by lowercase letters/digits, so
    runs of capitals (acronyms) are intentionally not split into words.
    """
    words = [match.group() for match in re.finditer("([A-Z])([a-z0-9]+)", class_name)]
    return ' '.join(words)
|
||||
|
||||
|
||||
def _processUpgrade(connection, upgradeClass):
|
||||
instance = upgradeClass(connection)
|
||||
logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
|
||||
|
@ -262,8 +270,9 @@ def _processUpgrade(connection, upgradeClass):
|
|||
for upgradeSubClass in upgradeClass.__subclasses__():
|
||||
_processUpgrade(connection, upgradeSubClass)
|
||||
|
||||
|
||||
# Base migration class. All future DB changes should be subclassed from this class
|
||||
class SchemaUpgrade (object):
|
||||
class SchemaUpgrade(object):
|
||||
def __init__(self, connection):
|
||||
self.connection = connection
|
||||
|
||||
|
|
|
@ -30,29 +30,36 @@ def fixStupidEncodings(x, silent=False):
|
|||
try:
|
||||
return x.decode(sickbeard.SYS_ENCODING)
|
||||
except UnicodeDecodeError:
|
||||
logger.log(u"Unable to decode value: "+repr(x), logger.ERROR)
|
||||
logger.log(u"Unable to decode value: " + repr(x), logger.ERROR)
|
||||
return None
|
||||
elif type(x) == unicode:
|
||||
return x
|
||||
else:
|
||||
logger.log(u"Unknown value passed in, ignoring it: "+str(type(x))+" ("+repr(x)+":"+repr(type(x))+")", logger.DEBUG if silent else logger.ERROR)
|
||||
logger.log(
|
||||
u"Unknown value passed in, ignoring it: " + str(type(x)) + " (" + repr(x) + ":" + repr(type(x)) + ")",
|
||||
logger.DEBUG if silent else logger.ERROR)
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def fixListEncodings(x):
    """Decode every element of a list/tuple via fixStupidEncodings.

    Non-sequence input is returned untouched. Elements that fail to decode
    (fixStupidEncodings returns None) are dropped from the result.
    Exact type() checks are kept deliberately so subclasses pass through.
    """
    if type(x) not in (list, tuple):
        return x
    decoded = map(fixStupidEncodings, x)
    return filter(lambda item: item != None, decoded)
|
||||
|
||||
|
||||
def callPeopleStupid(x):
    """Encode x with the system encoding.

    If the encode fails, log a loud error and retry with errors='ignore' so
    the caller always gets a byte string back (possibly with data dropped).
    """
    try:
        return x.encode(sickbeard.SYS_ENCODING)
    except UnicodeEncodeError:
        # keep the (intentionally shouty) original message byte-for-byte
        message = u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: "
        message += repr(x) + ", " + sickbeard.SYS_ENCODING
        logger.log(message, logger.ERROR)
        return x.encode(sickbeard.SYS_ENCODING, 'ignore')
|
||||
|
||||
|
||||
def ek(func, *args, **kwargs):
|
||||
result = None
|
||||
|
||||
|
|
|
@ -104,10 +104,6 @@ class NewzbinAPIThrottled(SickBeardException):
|
|||
"Newzbin has throttled us, deal with it"
|
||||
|
||||
|
||||
class TVRageException(SickBeardException):
|
||||
"TVRage API did something bad"
|
||||
|
||||
|
||||
class ShowDirNotFoundException(SickBeardException):
|
||||
"The show dir doesn't exist"
|
||||
|
||||
|
|
|
@ -76,7 +76,9 @@ class FailedProcessor(object):
|
|||
|
||||
self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
|
||||
if self._show_obj is None:
|
||||
self._log(u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)", logger.WARNING)
|
||||
self._log(
|
||||
u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)",
|
||||
logger.WARNING)
|
||||
raise exceptions.FailedProcessingFailed()
|
||||
|
||||
for episode in parsed.episode_numbers:
|
||||
|
@ -105,7 +107,7 @@ class FailedProcessor(object):
|
|||
for show_name in show_names:
|
||||
found_info = helpers.searchDBForShow(show_name)
|
||||
if found_info is not None:
|
||||
return(found_info[1])
|
||||
return (found_info[1])
|
||||
|
||||
return None
|
||||
|
||||
|
|
|
@ -20,7 +20,6 @@ import re
|
|||
import urllib
|
||||
import datetime
|
||||
|
||||
|
||||
from sickbeard import db
|
||||
from sickbeard import logger
|
||||
from sickbeard import exceptions
|
||||
|
@ -38,7 +37,7 @@ def prepareFailedName(release):
|
|||
"""Standardizes release name for failed DB"""
|
||||
|
||||
fixed = urllib.unquote(release)
|
||||
if(fixed.endswith(".nzb")):
|
||||
if (fixed.endswith(".nzb")):
|
||||
fixed = fixed.rpartition(".")[0]
|
||||
|
||||
fixed = re.sub("[\.\-\+\ ]", "_", fixed)
|
||||
|
@ -56,8 +55,10 @@ def logFailed(release):
|
|||
sql_results = myDB.select("SELECT * FROM history WHERE release=?", [release])
|
||||
|
||||
if len(sql_results) == 0:
|
||||
log_str += _log_helper(u"Release not found in snatch history. Recording it as bad with no size and no proivder.", logger.WARNING)
|
||||
log_str += _log_helper(u"Future releases of the same name from providers that don't return size will be skipped.", logger.WARNING)
|
||||
log_str += _log_helper(
|
||||
u"Release not found in snatch history. Recording it as bad with no size and no proivder.", logger.WARNING)
|
||||
log_str += _log_helper(
|
||||
u"Future releases of the same name from providers that don't return size will be skipped.", logger.WARNING)
|
||||
elif len(sql_results) > 1:
|
||||
log_str += _log_helper(u"Multiple logged snatches found for release", logger.WARNING)
|
||||
sizes = len(set(x["size"] for x in sql_results))
|
||||
|
@ -66,7 +67,9 @@ def logFailed(release):
|
|||
log_str += _log_helper(u"However, they're all the same size. Continuing with found size.", logger.WARNING)
|
||||
size = sql_results[0]["size"]
|
||||
else:
|
||||
log_str += _log_helper(u"They also vary in size. Deleting the logged snatches and recording this release with no size/provider", logger.WARNING)
|
||||
log_str += _log_helper(
|
||||
u"They also vary in size. Deleting the logged snatches and recording this release with no size/provider",
|
||||
logger.WARNING)
|
||||
for result in sql_results:
|
||||
deleteLoggedSnatch(result["release"], result["size"], result["provider"])
|
||||
|
||||
|
@ -128,13 +131,15 @@ def revertEpisode(show_obj, season, episode=None):
|
|||
log_str += _log_helper(u"Found in history")
|
||||
ep_obj.status = history_eps[episode]['old_status']
|
||||
else:
|
||||
log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED", logger.WARNING)
|
||||
log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
|
||||
logger.WARNING)
|
||||
ep_obj.status = WANTED
|
||||
|
||||
ep_obj.saveToDB()
|
||||
|
||||
except exceptions.EpisodeNotFoundException, e:
|
||||
log_str += _log_helper(u"Unable to create episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
|
||||
log_str += _log_helper(u"Unable to create episode, please set its status manually: " + exceptions.ex(e),
|
||||
logger.WARNING)
|
||||
else:
|
||||
# Whole season
|
||||
log_str += _log_helper(u"Setting season to wanted: " + str(season))
|
||||
|
@ -145,13 +150,15 @@ def revertEpisode(show_obj, season, episode=None):
|
|||
log_str += _log_helper(u"Found in history")
|
||||
ep_obj.status = history_eps[ep_obj]['old_status']
|
||||
else:
|
||||
log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED", logger.WARNING)
|
||||
log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
|
||||
logger.WARNING)
|
||||
ep_obj.status = WANTED
|
||||
|
||||
ep_obj.saveToDB()
|
||||
|
||||
return log_str
|
||||
|
||||
|
||||
def markFailed(show_obj, season, episode=None):
|
||||
log_str = u""
|
||||
|
||||
|
@ -165,7 +172,8 @@ def markFailed(show_obj, season, episode=None):
|
|||
ep_obj.saveToDB()
|
||||
|
||||
except exceptions.EpisodeNotFoundException, e:
|
||||
log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
|
||||
log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e),
|
||||
logger.WARNING)
|
||||
else:
|
||||
# Whole season
|
||||
for ep_obj in show_obj.getAllEpisodes(season):
|
||||
|
@ -176,6 +184,7 @@ def markFailed(show_obj, season, episode=None):
|
|||
|
||||
return log_str
|
||||
|
||||
|
||||
def logSnatch(searchResult):
|
||||
myDB = db.DBConnection("failed.db")
|
||||
|
||||
|
@ -196,7 +205,8 @@ def logSnatch(searchResult):
|
|||
myDB.action(
|
||||
"INSERT INTO history (date, size, release, provider, showid, season, episode, old_status)"
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
[logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode, old_status])
|
||||
[logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode,
|
||||
old_status])
|
||||
|
||||
|
||||
def deleteLoggedSnatch(release, size, provider):
|
||||
|
@ -210,7 +220,8 @@ def deleteLoggedSnatch(release, size, provider):
|
|||
|
||||
def trimHistory():
|
||||
myDB = db.DBConnection("failed.db")
|
||||
myDB.action("DELETE FROM history WHERE date < " + str((datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))
|
||||
myDB.action("DELETE FROM history WHERE date < " + str(
|
||||
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))
|
||||
|
||||
|
||||
def findRelease(show, season, episode):
|
||||
|
@ -227,10 +238,13 @@ def findRelease(show, season, episode):
|
|||
myDB = db.DBConnection("failed.db")
|
||||
|
||||
# Clear old snatches for this release if any exist
|
||||
myDB.action("DELETE FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + ")")
|
||||
myDB.action("DELETE FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(
|
||||
season) + " AND episode=" + str(episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
|
||||
show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + ")")
|
||||
|
||||
# Search for release in snatch history
|
||||
results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",[show.indexerid, season, episode])
|
||||
results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",
|
||||
[show.indexerid, season, episode])
|
||||
|
||||
for result in results:
|
||||
release = str(result["release"])
|
||||
|
@ -238,7 +252,7 @@ def findRelease(show, season, episode):
|
|||
date = result["date"]
|
||||
|
||||
# Clear any incomplete snatch records for this release if any exist
|
||||
myDB.action("DELETE FROM history WHERE release=? AND date!=?",[release, date])
|
||||
myDB.action("DELETE FROM history WHERE release=? AND date!=?", [release, date])
|
||||
|
||||
# Found a previously failed release
|
||||
logger.log(u"Failed release found for season (%s): (%s)" % (season, result["release"]), logger.DEBUG)
|
||||
|
|
|
@ -21,13 +21,14 @@ import threading
|
|||
|
||||
from sickbeard import logger
|
||||
|
||||
|
||||
class QueuePriorities:
    """Relative priorities for queue items; a higher value is picked up first."""
    LOW, NORMAL, HIGH = 10, 20, 30
|
||||
|
||||
class GenericQueue(object):
|
||||
|
||||
class GenericQueue(object):
|
||||
def __init__(self):
|
||||
|
||||
self.currentItem = None
|
||||
|
@ -38,13 +39,13 @@ class GenericQueue(object):
|
|||
self.queue_name = "QUEUE"
|
||||
|
||||
self.min_priority = 0
|
||||
|
||||
|
||||
self.currentItem = None
|
||||
|
||||
def pause(self):
    """Pause the queue by raising min_priority above any real item's priority."""
    logger.log(u"Pausing queue")
    # effectively infinite: no queued item carries a priority this high,
    # so run() will skip everything until unpause() resets it
    self.min_priority = 999999999999
|
||||
|
||||
|
||||
def unpause(self):
    """Resume the queue by resetting min_priority so all items qualify again."""
    logger.log(u"Unpausing queue")
    self.min_priority = 0
|
||||
|
@ -52,7 +53,7 @@ class GenericQueue(object):
|
|||
def add_item(self, item):
|
||||
item.added = datetime.datetime.now()
|
||||
self.queue.append(item)
|
||||
|
||||
|
||||
return item
|
||||
|
||||
def run(self):
|
||||
|
@ -69,7 +70,7 @@ class GenericQueue(object):
|
|||
if len(self.queue) > 0:
|
||||
|
||||
# sort by priority
|
||||
def sorter(x,y):
|
||||
def sorter(x, y):
|
||||
"""
|
||||
Sorts by priority descending then time ascending
|
||||
"""
|
||||
|
@ -81,10 +82,10 @@ class GenericQueue(object):
|
|||
elif y.added > x.added:
|
||||
return -1
|
||||
else:
|
||||
return y.priority-x.priority
|
||||
return y.priority - x.priority
|
||||
|
||||
self.queue.sort(cmp=sorter)
|
||||
|
||||
|
||||
queueItem = self.queue[0]
|
||||
|
||||
if queueItem.priority < self.min_priority:
|
||||
|
@ -101,8 +102,9 @@ class GenericQueue(object):
|
|||
# take it out of the queue
|
||||
del self.queue[0]
|
||||
|
||||
|
||||
class QueueItem:
|
||||
def __init__(self, name, action_id = 0):
|
||||
def __init__(self, name, action_id=0):
|
||||
self.name = name
|
||||
|
||||
self.inProgress = False
|
||||
|
@ -112,14 +114,14 @@ class QueueItem:
|
|||
self.thread_name = None
|
||||
|
||||
self.action_id = action_id
|
||||
|
||||
|
||||
self.added = None
|
||||
|
||||
def get_thread_name(self):
    """Return the explicit thread name if one was set, else derive one.

    The fallback is the item's display name, upper-cased with spaces
    replaced by dashes (e.g. "show queue" -> "SHOW-QUEUE").
    """
    if self.thread_name:
        return self.thread_name
    return self.name.replace(" ", "-").upper()
|
||||
|
||||
def execute(self):
|
||||
"""Implementing classes should call this"""
|
||||
|
|
|
@ -69,7 +69,8 @@ class GitHub(object):
|
|||
|
||||
Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
|
||||
"""
|
||||
access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'], params={'per_page': 100, 'sha': self.branch})
|
||||
access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'],
|
||||
params={'per_page': 100, 'sha': self.branch})
|
||||
return access_API
|
||||
|
||||
def compare(self, base, head, per_page=1):
|
||||
|
@ -84,5 +85,7 @@ class GitHub(object):
|
|||
|
||||
Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/
|
||||
"""
|
||||
access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head], params={'per_page': per_page})
|
||||
access_API = self._access_API(
|
||||
['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head],
|
||||
params={'per_page': per_page})
|
||||
return access_API
|
||||
|
|
|
@ -31,10 +31,10 @@ import httplib
|
|||
import urlparse
|
||||
import uuid
|
||||
import base64
|
||||
import string
|
||||
|
||||
from lib import requests
|
||||
from itertools import izip, cycle
|
||||
from contextlib import closing
|
||||
|
||||
try:
|
||||
import json
|
||||
|
@ -47,20 +47,17 @@ except ImportError:
|
|||
import elementtree.ElementTree as etree
|
||||
|
||||
from xml.dom.minidom import Node
|
||||
from datetime import datetime as dt
|
||||
|
||||
import sickbeard
|
||||
|
||||
from sickbeard.exceptions import MultipleShowObjectsException, ex
|
||||
from sickbeard import logger, classes
|
||||
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP, indexerStrings
|
||||
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP
|
||||
|
||||
from sickbeard import db
|
||||
from sickbeard import encodingKludge as ek
|
||||
from sickbeard import notifiers
|
||||
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
|
||||
from lib import subliminal
|
||||
#from sickbeard.subtitles import EXTENSIONS
|
||||
|
||||
|
@ -88,6 +85,7 @@ def indentXML(elem, level=0):
|
|||
if level and (not elem.tail or not elem.tail.strip()):
|
||||
elem.tail = i
|
||||
|
||||
|
||||
def replaceExtension(filename, newExt):
|
||||
'''
|
||||
>>> replaceExtension('foo.avi', 'mkv')
|
||||
|
@ -107,6 +105,7 @@ def replaceExtension(filename, newExt):
|
|||
else:
|
||||
return sepFile[0] + "." + newExt
|
||||
|
||||
|
||||
def isMediaFile(filename):
|
||||
# ignore samples
|
||||
if re.search('(^|[\W_])(sample\d*)[\W_]', filename, re.I):
|
||||
|
@ -117,31 +116,34 @@ def isMediaFile(filename):
|
|||
return False
|
||||
|
||||
sepFile = filename.rpartition(".")
|
||||
|
||||
|
||||
if re.search('extras?$', sepFile[0], re.I):
|
||||
return False
|
||||
|
||||
|
||||
if sepFile[2].lower() in mediaExtensions:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def isRarFile(filename):
    """Return True if filename is the first (or only) volume of a RAR set.

    Matches "name.rar" and "name.part01.rar" style first volumes while
    rejecting later volumes such as "name.part02.rar".
    """
    archive_regex = '(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)'
    return re.search(archive_regex, filename) is not None
|
||||
|
||||
|
||||
def isBeingWritten(filepath):
    """Return True if the file changed within the last 60 seconds.

    A recent ctime or mtime suggests the file may still be being written to,
    so callers should hold off processing it.
    """
    newest_change = max(ek.ek(os.path.getctime, filepath), ek.ek(os.path.getmtime, filepath))
    return newest_change > time.time() - 60
|
||||
|
||||
|
||||
def sanitizeFileName(name):
    '''
    Strip characters that are illegal or awkward in file names.

    >>> sanitizeFileName('a/b/c')
    'a-b-c'
    >>> sanitizeFileName('abc')
    'abc'
    >>> sanitizeFileName('a:b')
    'ab'
    >>> sanitizeFileName('.a.b..')
    'a.b'
    '''
    # path separators and asterisks become dashes; other reserved
    # characters (: " < > | ?) are removed outright
    cleaned = re.sub(r'[:"<>|?]', '', re.sub(r'[\\/\*]', '-', name))
    # leading/trailing dots and spaces confuse some filesystems
    return cleaned.strip(' .')
|
||||
|
||||
|
||||
|
@ -177,7 +179,7 @@ Returns a byte-string retrieved from the url provider.
|
|||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
it = iter(req_headers)
|
||||
|
@ -196,12 +198,14 @@ Returns a byte-string retrieved from the url provider.
|
|||
|
||||
return resp.content if resp.ok else None
|
||||
|
||||
|
||||
def _remove_file_failed(file):
    """Best-effort removal of a file left behind by a failed download.

    Removal errors (missing file, permissions, ...) are deliberately ignored,
    but the previous bare ``except:`` also swallowed KeyboardInterrupt and
    SystemExit; narrowed to Exception so those still propagate.
    """
    try:
        ek.ek(os.remove, file)
    except Exception:
        pass
|
||||
|
||||
|
||||
def download_file(url, filename):
|
||||
try:
|
||||
r = requests.get(url, stream=True)
|
||||
|
@ -228,9 +232,10 @@ def download_file(url, filename):
|
|||
_remove_file_failed(filename)
|
||||
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
|
||||
return False
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def findCertainShow(showList, indexerid):
|
||||
results = filter(lambda x: x.indexerid == indexerid, showList)
|
||||
if len(results) == 0:
|
||||
|
@ -240,6 +245,7 @@ def findCertainShow(showList, indexerid):
|
|||
else:
|
||||
return results[0]
|
||||
|
||||
|
||||
def makeDir(path):
|
||||
if not ek.ek(os.path.isdir, path):
|
||||
try:
|
||||
|
@ -252,8 +258,7 @@ def makeDir(path):
|
|||
|
||||
|
||||
def searchDBForShow(regShowName, indexer_id=None):
|
||||
|
||||
showNames = [re.sub('[. -]', ' ', regShowName),regShowName]
|
||||
showNames = [re.sub('[. -]', ' ', regShowName), regShowName]
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
|
@ -263,20 +268,25 @@ def searchDBForShow(regShowName, indexer_id=None):
|
|||
|
||||
show = get_show_by_name(showName, sickbeard.showList)
|
||||
if show:
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?", [show.name, show.name])
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
|
||||
[show.name, show.name])
|
||||
else:
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?", [showName, showName])
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
|
||||
[showName, showName])
|
||||
|
||||
if len(sqlResults) == 1:
|
||||
return (sqlResults[0]["indexer"], int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
|
||||
return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
|
||||
|
||||
else:
|
||||
|
||||
# if we didn't get exactly one result then try again with the year stripped off if possible
|
||||
match = re.match(yearRegex, showName)
|
||||
if match and match.group(1):
|
||||
logger.log(u"Unable to match original name but trying to manually strip and specify show year", logger.DEBUG)
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?", [match.group(1) + '%', match.group(1) + '%', match.group(3)])
|
||||
logger.log(u"Unable to match original name but trying to manually strip and specify show year",
|
||||
logger.DEBUG)
|
||||
sqlResults = myDB.select(
|
||||
"SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?",
|
||||
[match.group(1) + '%', match.group(1) + '%', match.group(3)])
|
||||
|
||||
if len(sqlResults) == 0:
|
||||
logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
|
||||
|
@ -285,49 +295,52 @@ def searchDBForShow(regShowName, indexer_id=None):
|
|||
logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
|
||||
continue
|
||||
else:
|
||||
return (sqlResults[0]["indexer"], int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
|
||||
return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
|
||||
|
||||
return None
|
||||
|
||||
def searchIndexersForShow(regShowName, indexer_id = None):
|
||||
|
||||
showNames = [re.sub('[. -]', ' ', regShowName),regShowName]
|
||||
def searchIndexersForShow(regShowName, indexer_id=None):
|
||||
showNames = [re.sub('[. -]', ' ', regShowName), regShowName]
|
||||
|
||||
for name in showNames:
|
||||
for indexer in indexerStrings:
|
||||
logger.log(u"Trying to find the " + name + " on " + indexer, logger.DEBUG)
|
||||
# Query Indexers for each search term and build the list of results
|
||||
for indexer in sickbeard.indexerApi().indexers:
|
||||
def searchShows():
|
||||
lINDEXER_API_PARMS = {'indexer': indexer}
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
try:
|
||||
lINDEXER_API_PARMS = {'indexer': indexer}
|
||||
|
||||
lINDEXER_API_PARMS['search_all_languages'] = True
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
showObj = t[name]
|
||||
return indexer
|
||||
except (indexer_exceptions.indexer_exception, IOError):
|
||||
# if none found, search on all languages
|
||||
for name in showNames:
|
||||
logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(indexer).name, logger.DEBUG)
|
||||
try:
|
||||
# There's gotta be a better way of doing this but we don't wanna
|
||||
# change the language value elsewhere
|
||||
if indexer_id:
|
||||
search = t[indexer_id]
|
||||
else:
|
||||
search = t[name]
|
||||
|
||||
lINDEXER_API_PARMS = {'indexer': indexer}
|
||||
if isinstance(search, dict):
|
||||
search = [search]
|
||||
|
||||
lINDEXER_API_PARMS['search_all_languages'] = True
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
# add search results
|
||||
result = [[t.config['id'], x['id']] for x in search if name.lower() == x['seriesname'].lower()]
|
||||
if len(result) > 0:
|
||||
result = [item for sublist in result for item in sublist]
|
||||
return result
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
showObj = t[name]
|
||||
return indexer
|
||||
except (indexer_exceptions.indexer_exception, IOError):
|
||||
pass
|
||||
except KeyError, e:
|
||||
break
|
||||
|
||||
continue
|
||||
except (IOError):
|
||||
continue
|
||||
except Exception, e:
|
||||
logger.log(
|
||||
u"Error while auto-detecting show indexer and indexerid on indexer " + sickbeard.indexerApi(
|
||||
indexer).name + ", retrying: " + ex(e), logger.ERROR)
|
||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||
continue
|
||||
|
||||
# search indexers for shows
|
||||
found = searchShows()
|
||||
if found: return found
|
||||
|
||||
return None
|
||||
|
||||
def sizeof_fmt(num):
|
||||
'''
|
||||
|
@ -347,8 +360,8 @@ def sizeof_fmt(num):
|
|||
return "%3.1f %s" % (num, x)
|
||||
num /= 1024.0
|
||||
|
||||
def listMediaFiles(path):
|
||||
|
||||
def listMediaFiles(path):
|
||||
if not dir or not ek.ek(os.path.isdir, path):
|
||||
return []
|
||||
|
||||
|
@ -365,6 +378,7 @@ def listMediaFiles(path):
|
|||
|
||||
return files
|
||||
|
||||
|
||||
def copyFile(srcFile, destFile):
|
||||
ek.ek(shutil.copyfile, srcFile, destFile)
|
||||
try:
|
||||
|
@ -372,6 +386,7 @@ def copyFile(srcFile, destFile):
|
|||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
def moveFile(srcFile, destFile):
|
||||
try:
|
||||
ek.ek(os.rename, srcFile, destFile)
|
||||
|
@ -380,13 +395,16 @@ def moveFile(srcFile, destFile):
|
|||
copyFile(srcFile, destFile)
|
||||
ek.ek(os.unlink, srcFile)
|
||||
|
||||
|
||||
def link(src, dst):
|
||||
if os.name == 'nt':
|
||||
import ctypes
|
||||
|
||||
if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
|
||||
else:
|
||||
os.link(src, dst)
|
||||
|
||||
|
||||
def hardlinkFile(srcFile, destFile):
|
||||
try:
|
||||
ek.ek(link, srcFile, destFile)
|
||||
|
@ -395,13 +413,17 @@ def hardlinkFile(srcFile, destFile):
|
|||
logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
|
||||
copyFile(srcFile, destFile)
|
||||
|
||||
|
||||
def symlink(src, dst):
|
||||
if os.name == 'nt':
|
||||
import ctypes
|
||||
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
|
||||
|
||||
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0,
|
||||
1280]: raise ctypes.WinError()
|
||||
else:
|
||||
os.symlink(src, dst)
|
||||
|
||||
|
||||
def moveAndSymlinkFile(srcFile, destFile):
|
||||
try:
|
||||
ek.ek(os.rename, srcFile, destFile)
|
||||
|
@ -411,6 +433,7 @@ def moveAndSymlinkFile(srcFile, destFile):
|
|||
logger.log(u"Failed to create symlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
|
||||
copyFile(srcFile, destFile)
|
||||
|
||||
|
||||
def make_dirs(path):
|
||||
"""
|
||||
Creates any folders that are missing and assigns them the permissions of their
|
||||
|
@ -466,27 +489,27 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):
|
|||
old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
|
||||
"""
|
||||
|
||||
new_dest_dir, new_dest_name = os.path.split(new_path) #@UnusedVariable
|
||||
new_dest_dir, new_dest_name = os.path.split(new_path) #@UnusedVariable
|
||||
|
||||
if old_path_length == 0 or old_path_length > len(cur_path):
|
||||
# approach from the right
|
||||
cur_file_name, cur_file_ext = os.path.splitext(cur_path) # @UnusedVariable
|
||||
else:
|
||||
# approach from the left
|
||||
cur_file_ext = cur_path[old_path_length:]
|
||||
cur_file_ext = cur_path[old_path_length:]
|
||||
cur_file_name = cur_path[:old_path_length]
|
||||
|
||||
|
||||
if cur_file_ext[1:] in subtitleExtensions:
|
||||
#Extract subtitle language from filename
|
||||
sublang = os.path.splitext(cur_file_name)[1][1:]
|
||||
|
||||
|
||||
#Check if the language extracted from filename is a valid language
|
||||
try:
|
||||
language = subliminal.language.Language(sublang, strict=True)
|
||||
cur_file_ext = '.'+sublang+cur_file_ext
|
||||
cur_file_ext = '.' + sublang + cur_file_ext
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
|
||||
# put the extension on the incoming file
|
||||
new_path += cur_file_ext
|
||||
|
||||
|
@ -524,7 +547,8 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
|
|||
|
||||
check_files = ek.ek(os.listdir, check_empty_dir)
|
||||
|
||||
if not check_files or (len(check_files) <= len(ignore_items) and all([check_file in ignore_items for check_file in check_files])):
|
||||
if not check_files or (len(check_files) <= len(ignore_items) and all(
|
||||
[check_file in ignore_items for check_file in check_files])):
|
||||
# directory is empty or contains only ignore_items
|
||||
try:
|
||||
logger.log(u"Deleting empty folder: " + check_empty_dir)
|
||||
|
@ -539,19 +563,20 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
|
|||
else:
|
||||
break
|
||||
|
||||
|
||||
def chmodAsParent(childPath):
|
||||
if os.name == 'nt' or os.name == 'ce':
|
||||
return
|
||||
|
||||
parentPath = ek.ek(os.path.dirname, childPath)
|
||||
|
||||
|
||||
if not parentPath:
|
||||
logger.log(u"No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
|
||||
return
|
||||
|
||||
|
||||
parentPathStat = ek.ek(os.stat, parentPath)
|
||||
parentMode = stat.S_IMODE(parentPathStat[stat.ST_MODE])
|
||||
|
||||
|
||||
childPathStat = ek.ek(os.stat, childPath)
|
||||
childPath_mode = stat.S_IMODE(childPathStat[stat.ST_MODE])
|
||||
|
||||
|
@ -564,18 +589,20 @@ def chmodAsParent(childPath):
|
|||
return
|
||||
|
||||
childPath_owner = childPathStat.st_uid
|
||||
user_id = os.geteuid() # @UndefinedVariable - only available on UNIX
|
||||
user_id = os.geteuid() # @UndefinedVariable - only available on UNIX
|
||||
|
||||
if user_id !=0 and user_id != childPath_owner:
|
||||
if user_id != 0 and user_id != childPath_owner:
|
||||
logger.log(u"Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
|
||||
return
|
||||
|
||||
try:
|
||||
ek.ek(os.chmod, childPath, childMode)
|
||||
logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode), logger.DEBUG)
|
||||
logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode),
|
||||
logger.DEBUG)
|
||||
except OSError:
|
||||
logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)
|
||||
|
||||
|
||||
def fileBitFilter(mode):
|
||||
for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
|
||||
if mode & bit:
|
||||
|
@ -583,6 +610,7 @@ def fileBitFilter(mode):
|
|||
|
||||
return mode
|
||||
|
||||
|
||||
def fixSetGroupID(childPath):
|
||||
if os.name == 'nt' or os.name == 'ce':
|
||||
return
|
||||
|
@ -600,19 +628,23 @@ def fixSetGroupID(childPath):
|
|||
return
|
||||
|
||||
childPath_owner = childStat.st_uid
|
||||
user_id = os.geteuid() # @UndefinedVariable - only available on UNIX
|
||||
user_id = os.geteuid() # @UndefinedVariable - only available on UNIX
|
||||
|
||||
if user_id !=0 and user_id != childPath_owner:
|
||||
logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID", logger.DEBUG)
|
||||
if user_id != 0 and user_id != childPath_owner:
|
||||
logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID",
|
||||
logger.DEBUG)
|
||||
return
|
||||
|
||||
try:
|
||||
ek.ek(os.chown, childPath, -1, parentGID) # @UndefinedVariable - only available on UNIX
|
||||
ek.ek(os.chown, childPath, -1, parentGID) # @UndefinedVariable - only available on UNIX
|
||||
logger.log(u"Respecting the set-group-ID bit on the parent directory for %s" % (childPath), logger.DEBUG)
|
||||
except OSError:
|
||||
logger.log(u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (childPath, parentGID), logger.ERROR)
|
||||
logger.log(
|
||||
u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (
|
||||
childPath, parentGID), logger.ERROR)
|
||||
|
||||
def sanitizeSceneName (name, ezrss=False):
|
||||
|
||||
def sanitizeSceneName(name, ezrss=False):
|
||||
"""
|
||||
Takes a show name and returns the "scenified" version of it.
|
||||
|
||||
|
@ -640,13 +672,15 @@ def sanitizeSceneName (name, ezrss=False):
|
|||
|
||||
return name
|
||||
|
||||
|
||||
def create_https_certificates(ssl_cert, ssl_key):
|
||||
"""
|
||||
Create self-signed HTTPS certificares and store in paths 'ssl_cert' and 'ssl_key'
|
||||
"""
|
||||
try:
|
||||
from lib.OpenSSL import crypto # @UnresolvedImport
|
||||
from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial # @UnresolvedImport
|
||||
from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
|
||||
serial # @UnresolvedImport
|
||||
except:
|
||||
logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
|
||||
return False
|
||||
|
@ -654,12 +688,12 @@ def create_https_certificates(ssl_cert, ssl_key):
|
|||
# Create the CA Certificate
|
||||
cakey = createKeyPair(TYPE_RSA, 1024)
|
||||
careq = createCertRequest(cakey, CN='Certificate Authority')
|
||||
cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years
|
||||
cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years
|
||||
|
||||
cname = 'SickBeard'
|
||||
pkey = createKeyPair(TYPE_RSA, 1024)
|
||||
req = createCertRequest(pkey, CN=cname)
|
||||
cert = createCertificate(req, (cacert, cakey), serial, (0, 60* 60 * 24 * 365 *10)) # ten years
|
||||
cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years
|
||||
|
||||
# Save the key and certificate to disk
|
||||
try:
|
||||
|
@ -671,8 +705,10 @@ def create_https_certificates(ssl_cert, ssl_key):
|
|||
|
||||
return True
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import doctest
|
||||
|
||||
doctest.testmod()
|
||||
|
||||
|
||||
|
@ -741,7 +777,7 @@ def get_xml_text(element, mini_dom=False):
|
|||
|
||||
return text.strip()
|
||||
|
||||
|
||||
|
||||
def backupVersionedFile(old_file, version):
|
||||
numTries = 0
|
||||
|
||||
|
@ -771,14 +807,17 @@ def backupVersionedFile(old_file, version):
|
|||
|
||||
|
||||
# try to convert to int, if it fails the default will be returned
|
||||
def tryInt(s, s_default = 0):
|
||||
try: return int(s)
|
||||
except: return s_default
|
||||
def tryInt(s, s_default=0):
|
||||
try:
|
||||
return int(s)
|
||||
except:
|
||||
return s_default
|
||||
|
||||
|
||||
# generates a md5 hash of a file
|
||||
def md5_for_file(filename, block_size=2**16):
|
||||
try:
|
||||
with open(filename,'rb') as f:
|
||||
def md5_for_file(filename, block_size=2 ** 16):
|
||||
try:
|
||||
with open(filename, 'rb') as f:
|
||||
md5 = hashlib.md5()
|
||||
while True:
|
||||
data = f.read(block_size)
|
||||
|
@ -789,7 +828,8 @@ def md5_for_file(filename, block_size=2**16):
|
|||
return md5.hexdigest()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
def get_lan_ip():
|
||||
"""
|
||||
Simple function to get LAN localhost_ip
|
||||
|
@ -799,12 +839,12 @@ def get_lan_ip():
|
|||
if os.name != "nt":
|
||||
import fcntl
|
||||
import struct
|
||||
|
||||
|
||||
def get_interface_ip(ifname):
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s',
|
||||
ifname[:15]))[20:24])
|
||||
|
||||
ifname[:15]))[20:24])
|
||||
|
||||
ip = socket.gethostbyname(socket.gethostname())
|
||||
if ip.startswith("127.") and os.name != "nt":
|
||||
interfaces = [
|
||||
|
@ -817,16 +857,17 @@ def get_lan_ip():
|
|||
"ath0",
|
||||
"ath1",
|
||||
"ppp0",
|
||||
]
|
||||
]
|
||||
for ifname in interfaces:
|
||||
try:
|
||||
ip = get_interface_ip(ifname)
|
||||
print ifname, ip
|
||||
print ifname, ip
|
||||
break
|
||||
except IOError:
|
||||
pass
|
||||
return ip
|
||||
|
||||
|
||||
def check_url(url):
|
||||
"""
|
||||
Check if a URL exists without downloading the whole file.
|
||||
|
@ -836,14 +877,14 @@ def check_url(url):
|
|||
# http://stackoverflow.com/questions/1140661
|
||||
good_codes = [httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY]
|
||||
|
||||
host, path = urlparse.urlparse(url)[1:3] # elems [1] and [2]
|
||||
host, path = urlparse.urlparse(url)[1:3] # elems [1] and [2]
|
||||
try:
|
||||
conn = httplib.HTTPConnection(host)
|
||||
conn.request('HEAD', path)
|
||||
return conn.getresponse().status in good_codes
|
||||
except StandardError:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
"""
|
||||
Encryption
|
||||
|
@ -860,27 +901,30 @@ To add a new encryption_version:
|
|||
"""
|
||||
|
||||
# Key Generators
|
||||
unique_key1 = hex(uuid.getnode()**2) # Used in encryption v1
|
||||
unique_key1 = hex(uuid.getnode() ** 2) # Used in encryption v1
|
||||
|
||||
# Encryption Functions
|
||||
def encrypt(data, encryption_version=0, decrypt=False):
|
||||
|
||||
# Version 1: Simple XOR encryption (this is not very secure, but works)
|
||||
if encryption_version == 1:
|
||||
if decrypt:
|
||||
return ''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(base64.decodestring(data), cycle(unique_key1)))
|
||||
if decrypt:
|
||||
return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
|
||||
else:
|
||||
return base64.encodestring(''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(data, cycle(unique_key1)))).strip()
|
||||
return base64.encodestring(
|
||||
''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()
|
||||
# Version 0: Plain text
|
||||
else:
|
||||
return data
|
||||
|
||||
|
||||
|
||||
def decrypt(data, encryption_version=0):
|
||||
return encrypt(data, encryption_version, decrypt=True)
|
||||
return encrypt(data, encryption_version, decrypt=True)
|
||||
|
||||
|
||||
def full_sanitizeSceneName(name):
|
||||
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()
|
||||
|
||||
|
||||
def _check_against_names(name, show):
|
||||
nameInQuestion = full_sanitizeSceneName(name)
|
||||
|
||||
|
@ -895,25 +939,26 @@ def _check_against_names(name, show):
|
|||
|
||||
return False
|
||||
|
||||
|
||||
def get_show_by_name(name, showList, useIndexer=False):
|
||||
logger.log(u"Trying to get the indexerid for "+name, logger.DEBUG)
|
||||
logger.log(u"Trying to get the indexerid for " + name, logger.DEBUG)
|
||||
|
||||
if showList:
|
||||
for show in showList:
|
||||
if _check_against_names(name, show):
|
||||
logger.log(u"Matched "+name+" in the showlist to the show "+show.name, logger.DEBUG)
|
||||
logger.log(u"Matched " + name + " in the showlist to the show " + show.name, logger.DEBUG)
|
||||
return show
|
||||
|
||||
if useIndexer:
|
||||
for indexer in indexerStrings:
|
||||
for indexer in sickbeard.indexerApi().indexers:
|
||||
try:
|
||||
lINDEXER_API_PARMS = {'indexer': indexer}
|
||||
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
showObj = t[name]
|
||||
except (indexer_exceptions.indexer_exception, IOError):
|
||||
except (sickbeard.indexer_exception, IOError):
|
||||
# if none found, search on all languages
|
||||
try:
|
||||
lINDEXER_API_PARMS = {'indexer': indexer}
|
||||
|
@ -921,9 +966,9 @@ def get_show_by_name(name, showList, useIndexer=False):
|
|||
lINDEXER_API_PARMS['search_all_languages'] = True
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
showObj = t[name]
|
||||
except (indexer_exceptions.indexer_exception, IOError):
|
||||
except (sickbeard.indexer_exception, IOError):
|
||||
pass
|
||||
|
||||
continue
|
||||
|
@ -936,12 +981,15 @@ def get_show_by_name(name, showList, useIndexer=False):
|
|||
|
||||
return None
|
||||
|
||||
|
||||
def suffix(d):
|
||||
return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
|
||||
return 'th' if 11 <= d <= 13 else {1: 'st', 2: 'nd', 3: 'rd'}.get(d % 10, 'th')
|
||||
|
||||
|
||||
def custom_strftime(format, t):
|
||||
return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))
|
||||
|
||||
|
||||
def is_hidden_folder(folder):
|
||||
"""
|
||||
Returns True if folder is hidden.
|
||||
|
@ -954,6 +1002,7 @@ def is_hidden_folder(folder):
|
|||
|
||||
return False
|
||||
|
||||
|
||||
def real_path(path):
|
||||
"""
|
||||
Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
|
||||
|
|
|
@ -19,25 +19,25 @@
|
|||
import db
|
||||
import datetime
|
||||
|
||||
from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality
|
||||
from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality
|
||||
|
||||
|
||||
dateFormat = "%Y%m%d%H%M%S"
|
||||
|
||||
def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
|
||||
|
||||
def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
|
||||
logDate = datetime.datetime.today().strftime(dateFormat)
|
||||
|
||||
if not isinstance(resource, unicode):
|
||||
resource = unicode(resource, 'utf-8')
|
||||
|
||||
myDB = db.DBConnection()
|
||||
myDB.action("INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
|
||||
[action, logDate, showid, season, episode, quality, resource, provider])
|
||||
myDB.action(
|
||||
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
|
||||
[action, logDate, showid, season, episode, quality, resource, provider])
|
||||
|
||||
|
||||
def logSnatch(searchResult):
|
||||
|
||||
for curEpObj in searchResult.episodes:
|
||||
|
||||
showid = int(curEpObj.show.indexerid)
|
||||
|
@ -57,14 +57,14 @@ def logSnatch(searchResult):
|
|||
|
||||
_logHistoryItem(action, showid, season, episode, quality, resource, provider)
|
||||
|
||||
def logDownload(episode, filename, new_ep_quality, release_group=None):
|
||||
|
||||
def logDownload(episode, filename, new_ep_quality, release_group=None):
|
||||
showid = int(episode.show.indexerid)
|
||||
season = int(episode.season)
|
||||
epNum = int(episode.episode)
|
||||
|
||||
quality = new_ep_quality
|
||||
|
||||
|
||||
# store the release group as the provider if possible
|
||||
if release_group:
|
||||
provider = release_group
|
||||
|
@ -75,21 +75,21 @@ def logDownload(episode, filename, new_ep_quality, release_group=None):
|
|||
|
||||
_logHistoryItem(action, showid, season, epNum, quality, filename, provider)
|
||||
|
||||
|
||||
def logSubtitle(showid, season, episode, status, subtitleResult):
|
||||
|
||||
resource = subtitleResult.path
|
||||
provider = subtitleResult.service
|
||||
status, quality = Quality.splitCompositeStatus(status)
|
||||
status, quality = Quality.splitCompositeStatus(status)
|
||||
action = Quality.compositeStatus(SUBTITLED, quality)
|
||||
|
||||
|
||||
_logHistoryItem(action, showid, season, episode, quality, resource, provider)
|
||||
|
||||
def logFailed(indexerid, season, episode, status, release, provider=None):
|
||||
|
||||
def logFailed(indexerid, season, episode, status, release, provider=None):
|
||||
showid = int(indexerid)
|
||||
season = int(season)
|
||||
epNum = int(episode)
|
||||
status, quality = Quality.splitCompositeStatus(status)
|
||||
status, quality = Quality.splitCompositeStatus(status)
|
||||
action = Quality.compositeStatus(FAILED, quality)
|
||||
|
||||
_logHistoryItem(action, showid, season, epNum, quality, release, provider)
|
||||
|
|
|
@ -28,11 +28,11 @@ from sickbeard.metadata.generic import GenericMetadata
|
|||
from lib.hachoir_parser import createParser
|
||||
from lib.hachoir_metadata import extractMetadata
|
||||
|
||||
|
||||
class ImageCache:
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
|
||||
def _cache_dir(self):
|
||||
"""
|
||||
Builds up the full path to the image cache directory
|
||||
|
@ -94,7 +94,7 @@ class ImageCache:
|
|||
Returns true if a cached poster exists for the given indexer id
|
||||
"""
|
||||
poster_path = self.poster_path(indexer_id)
|
||||
logger.log(u"Checking if file "+str(poster_path)+" exists", logger.DEBUG)
|
||||
logger.log(u"Checking if file " + str(poster_path) + " exists", logger.DEBUG)
|
||||
return ek.ek(os.path.isfile, poster_path)
|
||||
|
||||
def has_banner(self, indexer_id):
|
||||
|
@ -102,7 +102,7 @@ class ImageCache:
|
|||
Returns true if a cached banner exists for the given indexer id
|
||||
"""
|
||||
banner_path = self.banner_path(indexer_id)
|
||||
logger.log(u"Checking if file "+str(banner_path)+" exists", logger.DEBUG)
|
||||
logger.log(u"Checking if file " + str(banner_path) + " exists", logger.DEBUG)
|
||||
return ek.ek(os.path.isfile, banner_path)
|
||||
|
||||
def has_poster_thumbnail(self, indexer_id):
|
||||
|
@ -110,7 +110,7 @@ class ImageCache:
|
|||
Returns true if a cached poster thumbnail exists for the given indexer id
|
||||
"""
|
||||
poster_thumb_path = self.poster_thumb_path(indexer_id)
|
||||
logger.log(u"Checking if file "+str(poster_thumb_path)+" exists", logger.DEBUG)
|
||||
logger.log(u"Checking if file " + str(poster_thumb_path) + " exists", logger.DEBUG)
|
||||
return ek.ek(os.path.isfile, poster_thumb_path)
|
||||
|
||||
def has_banner_thumbnail(self, indexer_id):
|
||||
|
@ -118,7 +118,7 @@ class ImageCache:
|
|||
Returns true if a cached banner exists for the given indexer id
|
||||
"""
|
||||
banner_thumb_path = self.banner_thumb_path(indexer_id)
|
||||
logger.log(u"Checking if file "+str(banner_thumb_path)+" exists", logger.DEBUG)
|
||||
logger.log(u"Checking if file " + str(banner_thumb_path) + " exists", logger.DEBUG)
|
||||
return ek.ek(os.path.isfile, banner_thumb_path)
|
||||
|
||||
|
||||
|
@ -126,7 +126,7 @@ class ImageCache:
|
|||
POSTER = 2
|
||||
BANNER_THUMB = 3
|
||||
POSTER_THUMB = 4
|
||||
|
||||
|
||||
def which_type(self, path):
|
||||
"""
|
||||
Analyzes the image provided and attempts to determine whether it is a poster or banner.
|
||||
|
@ -137,7 +137,7 @@ class ImageCache:
|
|||
"""
|
||||
|
||||
if not ek.ek(os.path.isfile, path):
|
||||
logger.log(u"Couldn't check the type of "+str(path)+" cause it doesn't exist", logger.WARNING)
|
||||
logger.log(u"Couldn't check the type of " + str(path) + " cause it doesn't exist", logger.WARNING)
|
||||
return None
|
||||
|
||||
# use hachoir to parse the image for us
|
||||
|
@ -145,24 +145,24 @@ class ImageCache:
|
|||
img_metadata = extractMetadata(img_parser)
|
||||
|
||||
if not img_metadata:
|
||||
logger.log(u"Unable to get metadata from "+str(path)+", not using your existing image", logger.DEBUG)
|
||||
logger.log(u"Unable to get metadata from " + str(path) + ", not using your existing image", logger.DEBUG)
|
||||
return None
|
||||
|
||||
img_ratio = float(img_metadata.get('width'))/float(img_metadata.get('height'))
|
||||
|
||||
img_ratio = float(img_metadata.get('width')) / float(img_metadata.get('height'))
|
||||
|
||||
img_parser.stream._input.close()
|
||||
|
||||
# most posters are around 0.68 width/height ratio (eg. 680/1000)
|
||||
if 0.55 < img_ratio < 0.8:
|
||||
return self.POSTER
|
||||
|
||||
|
||||
# most banners are around 5.4 width/height ratio (eg. 758/140)
|
||||
elif 5 < img_ratio < 6:
|
||||
return self.BANNER
|
||||
else:
|
||||
logger.log(u"Image has size ratio of "+str(img_ratio)+", unknown type", logger.WARNING)
|
||||
logger.log(u"Image has size ratio of " + str(img_ratio) + ", unknown type", logger.WARNING)
|
||||
return None
|
||||
|
||||
|
||||
def _cache_image_from_file(self, image_path, img_type, indexer_id):
|
||||
"""
|
||||
Takes the image provided and copies it to the cache folder
|
||||
|
@ -180,21 +180,21 @@ class ImageCache:
|
|||
elif img_type == self.BANNER:
|
||||
dest_path = self.banner_path(indexer_id)
|
||||
else:
|
||||
logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
|
||||
logger.log(u"Invalid cache image type: " + str(img_type), logger.ERROR)
|
||||
return False
|
||||
|
||||
# make sure the cache folder exists before we try copying to it
|
||||
if not ek.ek(os.path.isdir, self._cache_dir()):
|
||||
logger.log(u"Image cache dir didn't exist, creating it at "+str(self._cache_dir()))
|
||||
logger.log(u"Image cache dir didn't exist, creating it at " + str(self._cache_dir()))
|
||||
ek.ek(os.makedirs, self._cache_dir())
|
||||
|
||||
if not ek.ek(os.path.isdir, self._thumbnails_dir()):
|
||||
logger.log(u"Thumbnails cache dir didn't exist, creating it at "+str(self._thumbnails_dir()))
|
||||
logger.log(u"Thumbnails cache dir didn't exist, creating it at " + str(self._thumbnails_dir()))
|
||||
ek.ek(os.makedirs, self._thumbnails_dir())
|
||||
|
||||
logger.log(u"Copying from "+image_path+" to "+dest_path)
|
||||
logger.log(u"Copying from " + image_path + " to " + dest_path)
|
||||
helpers.copyFile(image_path, dest_path)
|
||||
|
||||
|
||||
return True
|
||||
|
||||
def _cache_image_from_indexer(self, show_obj, img_type):
|
||||
|
@ -221,7 +221,7 @@ class ImageCache:
|
|||
img_type_name = 'banner_thumb'
|
||||
dest_path = self.banner_thumb_path(show_obj.indexerid)
|
||||
else:
|
||||
logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
|
||||
logger.log(u"Invalid cache image type: " + str(img_type), logger.ERROR)
|
||||
return False
|
||||
|
||||
# retrieve the image from indexer using the generic metadata class
|
||||
|
@ -231,7 +231,7 @@ class ImageCache:
|
|||
result = metadata_generator._write_image(img_data, dest_path)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def fill_cache(self, show_obj):
|
||||
"""
|
||||
Caches all images for the given show. Copies them from the show dir if possible, or
|
||||
|
@ -240,45 +240,51 @@ class ImageCache:
|
|||
show_obj: TVShow object to cache images for
|
||||
"""
|
||||
|
||||
logger.log(u"Checking if we need any cache images for show "+str(show_obj.indexerid), logger.DEBUG)
|
||||
logger.log(u"Checking if we need any cache images for show " + str(show_obj.indexerid), logger.DEBUG)
|
||||
|
||||
# check if the images are already cached or not
|
||||
need_images = {self.POSTER: not self.has_poster(show_obj.indexerid),
|
||||
self.BANNER: not self.has_banner(show_obj.indexerid),
|
||||
self.POSTER_THUMB: not self.has_poster_thumbnail(show_obj.indexerid),
|
||||
self.BANNER_THUMB: not self.has_banner_thumbnail(show_obj.indexerid)}
|
||||
|
||||
if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not need_images[self.BANNER_THUMB]:
|
||||
|
||||
if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not \
|
||||
need_images[self.BANNER_THUMB]:
|
||||
logger.log(u"No new cache images needed, not retrieving new ones")
|
||||
return
|
||||
|
||||
|
||||
# check the show dir for poster or banner images and use them
|
||||
if need_images[self.POSTER] or need_images[self.BANNER]:
|
||||
if need_images[self.POSTER] or need_images[self.BANNER]:
|
||||
try:
|
||||
for cur_provider in sickbeard.metadata_provider_dict.values():
|
||||
logger.log(u"Checking if we can use the show image from the "+cur_provider.name+" metadata", logger.DEBUG)
|
||||
logger.log(u"Checking if we can use the show image from the " + cur_provider.name + " metadata",
|
||||
logger.DEBUG)
|
||||
if ek.ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
|
||||
cur_file_name = os.path.abspath(cur_provider.get_poster_path(show_obj))
|
||||
cur_file_type = self.which_type(cur_file_name)
|
||||
|
||||
|
||||
if cur_file_type == None:
|
||||
logger.log(u"Unable to retrieve image type, not using the image from "+str(cur_file_name), logger.WARNING)
|
||||
logger.log(u"Unable to retrieve image type, not using the image from " + str(cur_file_name),
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
logger.log(u"Checking if image "+cur_file_name+" (type "+str(cur_file_type)+" needs metadata: "+str(need_images[cur_file_type]), logger.DEBUG)
|
||||
|
||||
|
||||
logger.log(u"Checking if image " + cur_file_name + " (type " + str(
|
||||
cur_file_type) + " needs metadata: " + str(need_images[cur_file_type]), logger.DEBUG)
|
||||
|
||||
if cur_file_type in need_images and need_images[cur_file_type]:
|
||||
logger.log(u"Found an image in the show dir that doesn't exist in the cache, caching it: "+cur_file_name+", type "+str(cur_file_type), logger.DEBUG)
|
||||
logger.log(
|
||||
u"Found an image in the show dir that doesn't exist in the cache, caching it: " + cur_file_name + ", type " + str(
|
||||
cur_file_type), logger.DEBUG)
|
||||
self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.indexerid)
|
||||
need_images[cur_file_type] = False
|
||||
except exceptions.ShowDirNotFoundException:
|
||||
logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)
|
||||
|
||||
|
||||
# download from indexer for missing ones
|
||||
for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB]:
|
||||
logger.log(u"Seeing if we still need an image of type "+str(cur_image_type)+": "+str(need_images[cur_image_type]), logger.DEBUG)
|
||||
logger.log(u"Seeing if we still need an image of type " + str(cur_image_type) + ": " + str(
|
||||
need_images[cur_image_type]), logger.DEBUG)
|
||||
if cur_image_type in need_images and need_images[cur_image_type]:
|
||||
self._cache_image_from_indexer(show_obj, cur_image_type)
|
||||
|
||||
|
||||
logger.log(u"Done cache check")
|
||||
|
|
|
@ -16,20 +16,4 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
__all__ = ["generic","indexer_api","indexer_exceptions"]
|
||||
|
||||
import indexer_api, indexer_exceptions
|
||||
|
||||
def getClientModule(name):
|
||||
|
||||
name = name.lower()
|
||||
prefix = "sickbeard.indexers."
|
||||
|
||||
return __import__(prefix+name, fromlist=__all__)
|
||||
|
||||
def getClientIstance(name):
|
||||
|
||||
module = getClientModule(name)
|
||||
className = module.__class__.__name__
|
||||
|
||||
return getattr(module, className)
|
||||
from . import indexer_api, indexer_exceptions
|
|
@ -1,66 +0,0 @@
|
|||
# Author: Nic Wolfe <nic@wolfeden.ca>
|
||||
# URL: http://code.google.com/p/sickbeard/
|
||||
#
|
||||
# This file is part of Sick Beard.
|
||||
#
|
||||
# Sick Beard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Sick Beard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||
import os
|
||||
|
||||
import sickbeard
|
||||
|
||||
class GenericIndexer(object):
|
||||
def __init__(self, indexer):
|
||||
|
||||
INDEXER_TVDB = 'Tvdb'
|
||||
INDEXER_TVRAGE = 'TVRage'
|
||||
|
||||
INDEXERS = {}
|
||||
INDEXERS[INDEXER_TVDB] = 'theTVDB'
|
||||
INDEXERS[INDEXER_TVRAGE] = 'TVRage'
|
||||
|
||||
INDEXER_API_KEY = {}
|
||||
INDEXER_API_KEY[INDEXER_TVDB] = '9DAF49C96CBF8DAC'
|
||||
INDEXER_API_KEY[INDEXER_TVRAGE] = 'Uhewg1Rr0o62fvZvUIZt'
|
||||
|
||||
INDEXER_BASEURL = {}
|
||||
INDEXER_BASEURL[INDEXER_TVDB] = 'http://thetvdb.com/api/' + INDEXER_API_KEY[INDEXER_TVDB] + '/series/'
|
||||
INDEXER_BASEURL[INDEXER_TVRAGE] = 'http://tvrage.com/showinfo?key=' + INDEXER_API_KEY[INDEXER_TVRAGE] + 'sid='
|
||||
|
||||
INDEXER_API_PARMS = {}
|
||||
INDEXER_API_PARMS[INDEXER_TVDB] = {'apikey': INDEXER_API_KEY[INDEXER_TVDB],
|
||||
'language': 'en',
|
||||
'useZip': True}
|
||||
|
||||
INDEXER_API_PARMS[INDEXER_TVRAGE] = {'apikey': INDEXER_API_KEY[INDEXER_TVRAGE],
|
||||
'language': 'en'}
|
||||
|
||||
self.config = {}
|
||||
self.config['valid_languages'] = [
|
||||
"da", "fi", "nl", "de", "it", "es", "fr","pl", "hu","el","tr",
|
||||
"ru","he","ja","pt","zh","cs","sl", "hr","ko","en","sv","no"]
|
||||
|
||||
self.config['langabbv_to_id'] = {'el': 20, 'en': 7, 'zh': 27,
|
||||
'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
|
||||
'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
|
||||
'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30}
|
||||
|
||||
self.indexers = [x for x in INDEXERS]
|
||||
|
||||
if indexer in INDEXERS:
|
||||
self.base_url = INDEXER_BASEURL[indexer]
|
||||
self.api_parms = INDEXER_API_PARMS[indexer]
|
||||
self.name = INDEXERS[indexer]
|
||||
|
||||
if sickbeard.CACHE_DIR:
|
||||
self.cache = os.path.join(sickbeard.CACHE_DIR, indexer)
|
|
@ -16,30 +16,34 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||
import os
|
||||
import datetime
|
||||
|
||||
import sickbeard
|
||||
import generic
|
||||
|
||||
from indexer_exceptions import indexer_attributenotfound
|
||||
from lib.tvdb_api.tvdb_api import Tvdb
|
||||
from lib.tvrage_api.tvrage_api import TVRage
|
||||
from indexer_config import initConfig, indexerConfig
|
||||
|
||||
class indexerApi(generic.GenericIndexer):
|
||||
|
||||
class indexerApi(object):
|
||||
def __init__(self, indexer=None, *args, **kwargs):
|
||||
generic.GenericIndexer.__init__(self, indexer)
|
||||
self._wrapped = object
|
||||
self.config = initConfig
|
||||
self.indexers = {k: v if k is 'id' else v['name'] for k, v in indexerConfig.items()}
|
||||
|
||||
if indexer in self.indexers:
|
||||
self.api_parms.update(**kwargs)
|
||||
if indexer in indexerConfig:
|
||||
self.name = indexerConfig[indexer]['name']
|
||||
self.config = indexerConfig[indexer]
|
||||
|
||||
if sickbeard.CACHE_DIR:
|
||||
self.api_parms['cache'] = self.cache
|
||||
# set cache if exists
|
||||
if sickbeard.CACHE_DIR: indexerConfig[indexer]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR,
|
||||
self.name)
|
||||
|
||||
# wrap the indexer API object and return it back
|
||||
self._wrapped = eval(indexer)(*args, **self.api_parms)
|
||||
if kwargs:
|
||||
# update API params
|
||||
indexerConfig[indexer]['api_params'].update(**kwargs)
|
||||
|
||||
# wrap the indexer API object and return it back
|
||||
self._wrapped = indexerConfig[indexer]['module'](**indexerConfig[indexer]['api_params'])
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self._wrapped, attr)
|
||||
|
||||
def __getitem__(self, attr):
|
||||
return self._wrapped.__getitem__(attr)
|
||||
return self._wrapped.__getitem__(attr)
|
||||
|
|
51
sickbeard/indexers/indexer_config.py
Normal file
51
sickbeard/indexers/indexer_config.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
from lib.tvdb_api.tvdb_api import Tvdb
|
||||
from lib.tvrage_api.tvrage_api import TVRage
|
||||
|
||||
INDEXER_TVDB = 1
|
||||
INDEXER_TVRAGE = 2
|
||||
|
||||
initConfig = {}
|
||||
indexerConfig = {}
|
||||
|
||||
initConfig['valid_languages'] = [
|
||||
"da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr",
|
||||
"ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"]
|
||||
|
||||
initConfig['langabbv_to_id'] = {
|
||||
'el': 20, 'en': 7, 'zh': 27,
|
||||
'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
|
||||
'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
|
||||
'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30}
|
||||
|
||||
indexerConfig[INDEXER_TVDB] = {
|
||||
'id': INDEXER_TVDB,
|
||||
'name': 'theTVDB',
|
||||
'module': Tvdb,
|
||||
'api_params': {'apikey': '9DAF49C96CBF8DAC',
|
||||
'language': 'en',
|
||||
'useZip': True
|
||||
},
|
||||
}
|
||||
|
||||
indexerConfig[INDEXER_TVRAGE] = {
|
||||
'id': INDEXER_TVRAGE,
|
||||
'name': 'TVRage',
|
||||
'module': TVRage,
|
||||
'api_params': {'apikey': 'Uhewg1Rr0o62fvZvUIZt',
|
||||
'language': 'en'
|
||||
},
|
||||
}
|
||||
|
||||
# TVDB Indexer Settings
|
||||
indexerConfig[INDEXER_TVDB]['xem_origin'] = 'tvdb'
|
||||
indexerConfig[INDEXER_TVDB]['icon'] = 'thetvdb16.png'
|
||||
indexerConfig[INDEXER_TVDB]['scene_url'] = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'
|
||||
indexerConfig[INDEXER_TVDB]['show_url'] = 'http://thetvdb.com/?tab=series&id='
|
||||
indexerConfig[INDEXER_TVDB]['base_url'] = 'http://thetvdb.com/api/%(apikey)s/series/' % indexerConfig[INDEXER_TVDB]['api_params']
|
||||
|
||||
# TVRAGE Indexer Settings
|
||||
indexerConfig[INDEXER_TVRAGE]['xem_origin'] = 'rage'
|
||||
indexerConfig[INDEXER_TVRAGE]['icon'] = 'tvrage16.png'
|
||||
indexerConfig[INDEXER_TVRAGE]['scene_url'] = 'http://raw.github.com/echel0n/sb_tvrage_scene_exceptions/master/exceptions.txt'
|
||||
indexerConfig[INDEXER_TVRAGE]['show_url'] = 'http://tvrage.com/shows/id-'
|
||||
indexerConfig[INDEXER_TVRAGE]['base_url'] = 'http://tvrage.com/showinfo.php?key=%(apikey)s&sid=' % indexerConfig[INDEXER_TVRAGE]['api_params']
|
|
@ -19,13 +19,13 @@ from lib.tvdb_api.tvdb_exceptions import \
|
|||
tvdb_seasonnotfound, tvdb_shownotfound, tvdb_userabort
|
||||
|
||||
indexerExcepts = ["indexer_exception", "indexer_error", "indexer_userabort", "indexer_shownotfound",
|
||||
"indexer_seasonnotfound", "indexer_episodenotfound", "indexer_attributenotfound"]
|
||||
"indexer_seasonnotfound", "indexer_episodenotfound", "indexer_attributenotfound"]
|
||||
|
||||
tvdbExcepts = ["tvdb_exception", "tvdb_error", "tvdb_userabort", "tvdb_shownotfound",
|
||||
"tvdb_seasonnotfound", "tvdb_episodenotfound", "tvdb_attributenotfound"]
|
||||
"tvdb_seasonnotfound", "tvdb_episodenotfound", "tvdb_attributenotfound"]
|
||||
|
||||
tvrageExcepts = ["tvdb_exception", "tvrage_error", "tvrage_userabort", "tvrage_shownotfound",
|
||||
"tvrage_seasonnotfound", "tvrage_episodenotfound", "tvrage_attributenotfound"]
|
||||
"tvrage_seasonnotfound", "tvrage_episodenotfound", "tvrage_attributenotfound"]
|
||||
|
||||
# link API exceptions to our exception handler
|
||||
indexer_exception = tvdb_exception, tvrage_exception
|
||||
|
|
|
@ -3,33 +3,99 @@ from __future__ import with_statement
|
|||
import unittest
|
||||
|
||||
import sys
|
||||
import datetime
|
||||
import os.path
|
||||
import string
|
||||
|
||||
sys.path.append(os.path.abspath('..'))
|
||||
sys.path.append(os.path.abspath('../../../lib'))
|
||||
|
||||
from sickbeard.indexers.indexer_api import indexerApi
|
||||
from sickbeard.indexers.indexer_exceptions import indexer_exception
|
||||
import sickbeard
|
||||
import itertools
|
||||
|
||||
from itertools import chain
|
||||
from sickbeard import classes
|
||||
|
||||
|
||||
class APICheck(unittest.TestCase):
|
||||
indexer_id = 81189
|
||||
indexer = 'Tvdb'
|
||||
indexer = u'3'
|
||||
|
||||
for i in int([indexer]) and sickbeard.indexerApi().indexers:
|
||||
print i
|
||||
|
||||
global indexer, keywords, nameUTF8
|
||||
|
||||
indexer = 0
|
||||
name = 'american dad'
|
||||
lang = "en"
|
||||
|
||||
# Set our common indexer_api options here
|
||||
INDEXER_API_PARMS = {'indexer': indexer}
|
||||
lindexer_api_parms = INDEXER_API_PARMS.copy()
|
||||
if not lang or lang == 'null':
|
||||
lang = "en"
|
||||
|
||||
try:
|
||||
lang_id = indexerApi().config['langabbv_to_id'][lang]
|
||||
t = indexerApi(cache=True, **lindexer_api_parms)
|
||||
myEp = t[indexer_id]
|
||||
results = []
|
||||
|
||||
if getattr(myEp, 'seriesname', None) is not None:
|
||||
print "FOUND"
|
||||
nameUTF8 = name.encode('utf-8')
|
||||
|
||||
except indexer_exception as e:
|
||||
print e
|
||||
pass
|
||||
# Use each word in the show's name as a possible search term
|
||||
keywords = nameUTF8.split(' ')
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
# Insert the whole show's name as the first search term so best results are first
|
||||
# ex: keywords = ['Some Show Name', 'Some', 'Show', 'Name']
|
||||
if len(keywords) > 1:
|
||||
keywords.insert(0, nameUTF8)
|
||||
|
||||
|
||||
# check for indexer preset
|
||||
indexers = [int(indexer)]
|
||||
if 0 in indexers:
|
||||
indexers = sickbeard.indexerApi().indexers
|
||||
|
||||
# Query Indexers for each search term and build the list of results
|
||||
for i in indexers:
|
||||
def searchShows(i):
|
||||
results = []
|
||||
|
||||
lINDEXER_API_PARMS = {'indexer': i}
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
for searchTerm in keywords:
|
||||
try:
|
||||
search = t[searchTerm]
|
||||
if isinstance(search, dict):
|
||||
search = [search]
|
||||
|
||||
# add search results
|
||||
result = [
|
||||
[t.name, t.config['id'], t.config["show_url"], int(x['id']), x['seriesname'], x['firstaired']]
|
||||
for x in search if nameUTF8.lower() in x['seriesname'].lower()]
|
||||
|
||||
# see if we have any matches
|
||||
if len(result) > 0:
|
||||
# add result to list of found shows
|
||||
results += result
|
||||
|
||||
# search through result to see if we have a exact match
|
||||
for show in result:
|
||||
# cleanup the series name
|
||||
seriesname = show[4].encode('utf-8').translate(None, string.punctuation)
|
||||
|
||||
# check if we got a exact match
|
||||
if nameUTF8.lower() == seriesname.lower():
|
||||
return results
|
||||
|
||||
except Exception, e:
|
||||
continue
|
||||
|
||||
# finished searching a indexer so return the results
|
||||
return results
|
||||
|
||||
# search indexers for shows
|
||||
results += searchShows(i)
|
||||
|
||||
# remove duplicates
|
||||
results = list(results for results, _ in itertools.groupby(results))
|
||||
print results
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
|
@ -25,6 +25,7 @@ import sqlite3
|
|||
|
||||
import sys
|
||||
import os.path
|
||||
|
||||
sys.path.append(os.path.abspath('..'))
|
||||
sys.path.append(os.path.abspath('../lib'))
|
||||
|
||||
|
@ -43,7 +44,6 @@ TESTDIR = os.path.abspath('.')
|
|||
TESTDBNAME = "sickbeard.db"
|
||||
TESTCACHEDBNAME = "cache.db"
|
||||
|
||||
|
||||
SHOWNAME = u"show name"
|
||||
SEASON = 4
|
||||
EPISODE = 2
|
||||
|
@ -78,9 +78,9 @@ sickbeard.NAMING_PATTERN = ''
|
|||
sickbeard.NAMING_ABD_PATTERN = ''
|
||||
sickbeard.NAMING_MULTI_EP = 1
|
||||
|
||||
|
||||
sickbeard.PROVIDER_ORDER = ["sick_beard_index"]
|
||||
sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0!!!NZBs.org|http://nzbs.org/||5030,5040,5070,5090|0!!!Usenet-Crawler|http://www.usenet-crawler.com/||5030,5040|0")
|
||||
sickbeard.newznabProviderList = providers.getNewznabProviderList(
|
||||
"Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0!!!NZBs.org|http://nzbs.org/||5030,5040,5070,5090|0!!!Usenet-Crawler|http://www.usenet-crawler.com/||5030,5040|0")
|
||||
sickbeard.providerList = providers.makeProviderList()
|
||||
|
||||
sickbeard.PROG_DIR = os.path.abspath('..')
|
||||
|
@ -95,6 +95,7 @@ sickbeard.logger.sb_log_instance.initLogging(False)
|
|||
#=================
|
||||
def _dummy_saveConfig():
|
||||
return True
|
||||
|
||||
# this overrides the sickbeard save_config which gets called during a db upgrade
|
||||
# this might be considered a hack
|
||||
mainDB.sickbeard.save_config = _dummy_saveConfig
|
||||
|
@ -104,6 +105,7 @@ mainDB.sickbeard.save_config = _dummy_saveConfig
|
|||
def _fake_specifyEP(self, season, episode):
|
||||
pass
|
||||
|
||||
|
||||
sickbeard.tv.TVEpisode.specifyEpisode = _fake_specifyEP
|
||||
|
||||
|
||||
|
@ -125,14 +127,12 @@ class SickbeardTestDBCase(unittest.TestCase):
|
|||
|
||||
|
||||
class TestDBConnection(db.DBConnection, object):
|
||||
|
||||
def __init__(self, dbFileName=TESTDBNAME):
|
||||
dbFileName = os.path.join(TESTDIR, dbFileName)
|
||||
super(TestDBConnection, self).__init__(dbFileName)
|
||||
|
||||
|
||||
class TestCacheDBConnection(TestDBConnection, object):
|
||||
|
||||
def __init__(self, providerName):
|
||||
db.DBConnection.__init__(self, os.path.join(TESTDIR, TESTCACHEDBNAME))
|
||||
|
||||
|
@ -210,6 +210,7 @@ def setUp_test_show_dir():
|
|||
def tearDown_test_show_dir():
|
||||
shutil.rmtree(SHOWDIR)
|
||||
|
||||
|
||||
tearDown_test_db()
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -39,27 +39,26 @@ ERROR = logging.ERROR
|
|||
WARNING = logging.WARNING
|
||||
MESSAGE = logging.INFO
|
||||
DEBUG = logging.DEBUG
|
||||
DB = 5
|
||||
DB = 5
|
||||
|
||||
reverseNames = {u'ERROR': ERROR,
|
||||
u'WARNING': WARNING,
|
||||
u'INFO': MESSAGE,
|
||||
u'DEBUG': DEBUG,
|
||||
u'DB' : DB}
|
||||
u'DB': DB}
|
||||
|
||||
|
||||
class SBRotatingLogHandler(object):
|
||||
|
||||
def __init__(self, log_file, num_files, num_bytes):
|
||||
self.num_files = num_files
|
||||
self.num_bytes = num_bytes
|
||||
|
||||
|
||||
self.log_file = log_file
|
||||
self.log_file_path = log_file
|
||||
self.cur_handler = None
|
||||
|
||||
self.writes_since_check = 0
|
||||
|
||||
|
||||
self.console_logging = False
|
||||
self.log_lock = threading.Lock()
|
||||
|
||||
|
@ -74,7 +73,7 @@ class SBRotatingLogHandler(object):
|
|||
|
||||
sb_logger.removeHandler(handler)
|
||||
sub_logger.removeHandler(handler)
|
||||
imdb_logger.removeHandler(handler)
|
||||
imdb_logger.removeHandler(handler)
|
||||
|
||||
handler.flush()
|
||||
handler.close()
|
||||
|
@ -83,17 +82,17 @@ class SBRotatingLogHandler(object):
|
|||
|
||||
if consoleLogging:
|
||||
self.console_logging = consoleLogging
|
||||
|
||||
|
||||
old_handler = None
|
||||
|
||||
|
||||
# get old handler in case we want to close it
|
||||
if self.cur_handler:
|
||||
old_handler = self.cur_handler
|
||||
else:
|
||||
|
||||
|
||||
#Add a new logging level DB
|
||||
logging.addLevelName(5,'DB')
|
||||
|
||||
logging.addLevelName(5, 'DB')
|
||||
|
||||
# only start consoleLogging on first initialize
|
||||
if self.console_logging:
|
||||
# define a Handler which writes INFO messages or higher to the sys.stderr
|
||||
|
@ -102,19 +101,21 @@ class SBRotatingLogHandler(object):
|
|||
console.setLevel(logging.INFO)
|
||||
|
||||
# set a format which is simpler for console use
|
||||
console.setFormatter(DispatchingFormatter({'sickbeard' : logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
|
||||
'subliminal' : logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s', '%H:%M:%S'),
|
||||
'imdbpy' : logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S')
|
||||
},
|
||||
logging.Formatter('%(message)s'),))
|
||||
console.setFormatter(DispatchingFormatter(
|
||||
{'sickbeard': logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
|
||||
'subliminal': logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s',
|
||||
'%H:%M:%S'),
|
||||
'imdbpy': logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S')
|
||||
},
|
||||
logging.Formatter('%(message)s'), ))
|
||||
|
||||
# add the handler to the root logger
|
||||
logging.getLogger('sickbeard').addHandler(console)
|
||||
logging.getLogger('sickbeard').addHandler(console)
|
||||
logging.getLogger('subliminal').addHandler(console)
|
||||
logging.getLogger('imdbpy').addHandler(console)
|
||||
|
||||
self.log_file_path = os.path.join(sickbeard.LOG_DIR, self.log_file)
|
||||
|
||||
|
||||
self.cur_handler = self._config_handler()
|
||||
logging.getLogger('sickbeard').addHandler(self.cur_handler)
|
||||
logging.getLogger('subliminal').addHandler(self.cur_handler)
|
||||
|
@ -127,28 +128,30 @@ class SBRotatingLogHandler(object):
|
|||
# already logging in new log folder, close the old handler
|
||||
if old_handler:
|
||||
self.close_log(old_handler)
|
||||
# old_handler.flush()
|
||||
# old_handler.close()
|
||||
# sb_logger = logging.getLogger('sickbeard')
|
||||
# sub_logger = logging.getLogger('subliminal')
|
||||
# imdb_logger = logging.getLogger('imdbpy')
|
||||
# sb_logger.removeHandler(old_handler)
|
||||
# subli_logger.removeHandler(old_handler)
|
||||
# imdb_logger.removeHandler(old_handler)
|
||||
# old_handler.flush()
|
||||
# old_handler.close()
|
||||
# sb_logger = logging.getLogger('sickbeard')
|
||||
# sub_logger = logging.getLogger('subliminal')
|
||||
# imdb_logger = logging.getLogger('imdbpy')
|
||||
# sb_logger.removeHandler(old_handler)
|
||||
# subli_logger.removeHandler(old_handler)
|
||||
# imdb_logger.removeHandler(old_handler)
|
||||
|
||||
def _config_handler(self):
|
||||
"""
|
||||
Configure a file handler to log at file_name and return it.
|
||||
"""
|
||||
|
||||
|
||||
file_handler = logging.FileHandler(self.log_file_path, encoding='utf-8')
|
||||
file_handler.setLevel(DB)
|
||||
file_handler.setFormatter(DispatchingFormatter({'sickbeard' : logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'subliminal' : logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'imdbpy' : logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S')
|
||||
},
|
||||
logging.Formatter('%(message)s'),))
|
||||
|
||||
file_handler.setFormatter(DispatchingFormatter(
|
||||
{'sickbeard': logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'subliminal': logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s',
|
||||
'%Y-%m-%d %H:%M:%S'),
|
||||
'imdbpy': logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S')
|
||||
},
|
||||
logging.Formatter('%(message)s'), ))
|
||||
|
||||
return file_handler
|
||||
|
||||
def _log_file_name(self, i):
|
||||
|
@ -160,7 +163,7 @@ class SBRotatingLogHandler(object):
|
|||
"""
|
||||
|
||||
return self.log_file_path + ('.' + str(i) if i else '')
|
||||
|
||||
|
||||
def _num_logs(self):
|
||||
"""
|
||||
Scans the log folder and figures out how many log files there are already on disk
|
||||
|
@ -174,15 +177,15 @@ class SBRotatingLogHandler(object):
|
|||
return cur_log - 1
|
||||
|
||||
def _rotate_logs(self):
|
||||
|
||||
|
||||
sb_logger = logging.getLogger('sickbeard')
|
||||
sub_logger = logging.getLogger('subliminal')
|
||||
imdb_logger = logging.getLogger('imdbpy')
|
||||
|
||||
|
||||
# delete the old handler
|
||||
if self.cur_handler:
|
||||
self.close_log()
|
||||
|
||||
|
||||
# rename or delete all the old log files
|
||||
for i in range(self._num_logs(), -1, -1):
|
||||
cur_file_name = self._log_file_name(i)
|
||||
|
@ -193,12 +196,12 @@ class SBRotatingLogHandler(object):
|
|||
os.rename(cur_file_name, self._log_file_name(i + 1))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
# the new log handler will always be on the un-numbered .log file
|
||||
new_file_handler = self._config_handler()
|
||||
|
||||
|
||||
self.cur_handler = new_file_handler
|
||||
|
||||
|
||||
sb_logger.addHandler(new_file_handler)
|
||||
sub_logger.addHandler(new_file_handler)
|
||||
imdb_logger.addHandler(new_file_handler)
|
||||
|
@ -254,7 +257,6 @@ class SBRotatingLogHandler(object):
|
|||
|
||||
|
||||
class DispatchingFormatter:
|
||||
|
||||
def __init__(self, formatters, default_formatter):
|
||||
self._formatters = formatters
|
||||
self._default_formatter = default_formatter
|
||||
|
@ -266,11 +268,14 @@ class DispatchingFormatter:
|
|||
|
||||
sb_log_instance = SBRotatingLogHandler('sickbeard.log', NUM_LOGS, LOG_SIZE)
|
||||
|
||||
|
||||
def log(toLog, logLevel=MESSAGE):
|
||||
sb_log_instance.log(toLog, logLevel)
|
||||
|
||||
|
||||
def log_error_and_exit(error_msg):
|
||||
sb_log_instance.log_error_and_exit(error_msg)
|
||||
|
||||
|
||||
|
||||
def close():
|
||||
sb_log_instance.close_log()
|
|
@ -21,26 +21,29 @@ __all__ = ['generic', 'helpers', 'xbmc', 'xbmc_12plus', 'mediabrowser', 'ps3', '
|
|||
import sys
|
||||
import xbmc, xbmc_12plus, mediabrowser, ps3, wdtv, tivo
|
||||
|
||||
|
||||
def available_generators():
|
||||
return filter(lambda x: x not in ('generic', 'helpers'), __all__)
|
||||
|
||||
|
||||
def _getMetadataModule(name):
|
||||
name = name.lower()
|
||||
prefix = "sickbeard.metadata."
|
||||
if name in __all__ and prefix+name in sys.modules:
|
||||
return sys.modules[prefix+name]
|
||||
if name in __all__ and prefix + name in sys.modules:
|
||||
return sys.modules[prefix + name]
|
||||
else:
|
||||
return None
|
||||
|
||||
def _getMetadataClass(name):
|
||||
|
||||
def _getMetadataClass(name):
|
||||
module = _getMetadataModule(name)
|
||||
|
||||
|
||||
if not module:
|
||||
return None
|
||||
|
||||
|
||||
return module.metadata_class()
|
||||
|
||||
|
||||
def get_metadata_generator_dict():
|
||||
result = {}
|
||||
for cur_generator_id in available_generators():
|
||||
|
@ -48,6 +51,6 @@ def get_metadata_generator_dict():
|
|||
if not cur_generator:
|
||||
continue
|
||||
result[cur_generator.name] = cur_generator
|
||||
|
||||
|
||||
return result
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ from sickbeard.exceptions import ex
|
|||
from sickbeard.show_name_helpers import allPossibleShowNames
|
||||
|
||||
from lib.tmdb_api.tmdb_api import TMDB
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
|
||||
|
||||
class GenericMetadata():
|
||||
"""
|
||||
|
@ -88,7 +88,9 @@ class GenericMetadata():
|
|||
self.season_all_banner = season_all_banner
|
||||
|
||||
def get_config(self):
|
||||
config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner, self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster, self.season_all_banner]
|
||||
config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner,
|
||||
self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster,
|
||||
self.season_all_banner]
|
||||
return '|'.join([str(int(x)) for x in config_list])
|
||||
|
||||
def get_id(self):
|
||||
|
@ -161,12 +163,14 @@ class GenericMetadata():
|
|||
|
||||
def _has_season_all_poster(self, show_obj):
|
||||
result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj))
|
||||
logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG)
|
||||
logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result),
|
||||
logger.DEBUG)
|
||||
return result
|
||||
|
||||
def _has_season_all_banner(self, show_obj):
|
||||
result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj))
|
||||
logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG)
|
||||
logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result),
|
||||
logger.DEBUG)
|
||||
return result
|
||||
|
||||
def get_show_file_path(self, show_obj):
|
||||
|
@ -264,7 +268,8 @@ class GenericMetadata():
|
|||
|
||||
def create_episode_metadata(self, ep_obj, force=False):
|
||||
if self.episode_metadata and ep_obj and (not self._has_episode_metadata(ep_obj) or force):
|
||||
logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(), logger.DEBUG)
|
||||
logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(),
|
||||
logger.DEBUG)
|
||||
return self.write_ep_file(ep_obj)
|
||||
return False
|
||||
|
||||
|
@ -288,7 +293,8 @@ class GenericMetadata():
|
|||
|
||||
def create_episode_thumb(self, ep_obj):
|
||||
if self.episode_thumbnails and ep_obj and not self._has_episode_thumb(ep_obj):
|
||||
logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.prettyName(), logger.DEBUG)
|
||||
logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.prettyName(),
|
||||
logger.DEBUG)
|
||||
return self.save_thumbnail(ep_obj)
|
||||
return False
|
||||
|
||||
|
@ -297,7 +303,8 @@ class GenericMetadata():
|
|||
result = []
|
||||
for season, episodes in show_obj.episodes.iteritems(): # @UnusedVariable
|
||||
if not self._has_season_poster(show_obj, season):
|
||||
logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name, logger.DEBUG)
|
||||
logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name,
|
||||
logger.DEBUG)
|
||||
result = result + [self.save_season_posters(show_obj, season)]
|
||||
return all(result)
|
||||
return False
|
||||
|
@ -307,20 +314,23 @@ class GenericMetadata():
|
|||
result = []
|
||||
for season, episodes in show_obj.episodes.iteritems(): # @UnusedVariable
|
||||
if not self._has_season_banner(show_obj, season):
|
||||
logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name, logger.DEBUG)
|
||||
logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name,
|
||||
logger.DEBUG)
|
||||
result = result + [self.save_season_banners(show_obj, season)]
|
||||
return all(result)
|
||||
return False
|
||||
|
||||
def create_season_all_poster(self, show_obj):
|
||||
if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj):
|
||||
logger.log(u"Metadata provider " + self.name + " creating season all poster for " + show_obj.name, logger.DEBUG)
|
||||
logger.log(u"Metadata provider " + self.name + " creating season all poster for " + show_obj.name,
|
||||
logger.DEBUG)
|
||||
return self.save_season_all_poster(show_obj)
|
||||
return False
|
||||
|
||||
def create_season_all_banner(self, show_obj):
|
||||
if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj):
|
||||
logger.log(u"Metadata provider " + self.name + " creating season all banner for " + show_obj.name, logger.DEBUG)
|
||||
logger.log(u"Metadata provider " + self.name + " creating season all banner for " + show_obj.name,
|
||||
logger.DEBUG)
|
||||
return self.save_season_all_banner(show_obj)
|
||||
return False
|
||||
|
||||
|
@ -349,21 +359,24 @@ class GenericMetadata():
|
|||
if ep_obj.show.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
indexer_show_obj = t[ep_obj.show.indexerid]
|
||||
except indexer_exceptions.indexer_shownotfound, e:
|
||||
except sickbeard.indexer_shownotfound, e:
|
||||
raise exceptions.ShowNotFoundException(e.message)
|
||||
except indexer_exceptions.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
except sickbeard.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
# try all included episodes in case some have thumbs and others don't
|
||||
for cur_ep in all_eps:
|
||||
try:
|
||||
myEp = indexer_show_obj[cur_ep.season][cur_ep.episode]
|
||||
except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(cur_ep.season) + "x" + str(cur_ep.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?")
|
||||
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(cur_ep.season) + "x" + str(
|
||||
cur_ep.episode) + " on " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?")
|
||||
continue
|
||||
|
||||
thumb_url = getattr(myEp, 'filename', None)
|
||||
|
@ -410,7 +423,8 @@ class GenericMetadata():
|
|||
nfo_file.close()
|
||||
helpers.chmodAsParent(nfo_file_path)
|
||||
except IOError, e:
|
||||
logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR)
|
||||
logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -454,7 +468,8 @@ class GenericMetadata():
|
|||
nfo_file.close()
|
||||
helpers.chmodAsParent(nfo_file_path)
|
||||
except IOError, e:
|
||||
logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR)
|
||||
logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -580,7 +595,8 @@ class GenericMetadata():
|
|||
season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)
|
||||
|
||||
if not season_poster_file_path:
|
||||
logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", logger.DEBUG)
|
||||
logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
seasonData = metadata_helpers.getShowImage(season_url)
|
||||
|
@ -627,7 +643,8 @@ class GenericMetadata():
|
|||
season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)
|
||||
|
||||
if not season_banner_file_path:
|
||||
logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", logger.DEBUG)
|
||||
logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
seasonData = metadata_helpers.getShowImage(season_url)
|
||||
|
@ -699,7 +716,9 @@ class GenericMetadata():
|
|||
outFile.close()
|
||||
helpers.chmodAsParent(image_path)
|
||||
except IOError, e:
|
||||
logger.log(u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e), logger.ERROR)
|
||||
logger.log(
|
||||
u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -730,14 +749,16 @@ class GenericMetadata():
|
|||
if show_obj.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
indexer_show_obj = t[show_obj.indexerid]
|
||||
except (indexer_exceptions.indexer_error, IOError), e:
|
||||
logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR)
|
||||
except (sickbeard.indexer_error, IOError), e:
|
||||
logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if image_type not in ('fanart', 'poster', 'banner', 'poster_thumb', 'banner_thumb'):
|
||||
logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + show_obj.indexer + " object", logger.ERROR)
|
||||
logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + " object", logger.ERROR)
|
||||
return None
|
||||
|
||||
if image_type == 'poster_thumb':
|
||||
|
@ -793,16 +814,17 @@ class GenericMetadata():
|
|||
if show_obj.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
indexer_show_obj = t[show_obj.indexerid]
|
||||
except (indexer_exceptions.indexer_error, IOError), e:
|
||||
logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR)
|
||||
except (sickbeard.indexer_error, IOError), e:
|
||||
logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
|
||||
return result
|
||||
|
||||
# if we have no season banners then just finish
|
||||
if getattr(indexer_show_obj, '_banners', None) is None:
|
||||
return result
|
||||
|
||||
|
||||
if 'season' not in indexer_show_obj['_banners'] or 'season' not in indexer_show_obj['_banners']['season']:
|
||||
return result
|
||||
|
||||
|
@ -845,10 +867,11 @@ class GenericMetadata():
|
|||
if indexer_lang and not indexer_lang == 'en':
|
||||
lINDEXER_API_PARMS['language'] = indexer_lang
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
indexer_show_obj = t[show_obj.indexerid]
|
||||
except (indexer_exceptions.indexer_error, IOError), e:
|
||||
logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR)
|
||||
except (sickbeard.indexer_error, IOError), e:
|
||||
logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
|
||||
return result
|
||||
|
||||
# if we have no season banners then just finish
|
||||
|
@ -890,17 +913,24 @@ class GenericMetadata():
|
|||
with ek.ek(open, metadata_path, 'r') as xmlFileObj:
|
||||
showXML = etree.ElementTree(file=xmlFileObj)
|
||||
|
||||
if showXML.findtext('title') == None\
|
||||
or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None):
|
||||
if showXML.findtext('title') == None \
|
||||
or (showXML.findtext('tvdbid') == None
|
||||
and showXML.findtext('id') == None) \
|
||||
and showXML.findtext('indexer') == None:
|
||||
logger.log(u"Invalid info in tvshow.nfo (missing name or id):" \
|
||||
+ str(showXML.findtext('title')) + " " \
|
||||
+ str(showXML.findtext('indexer')) + " " \
|
||||
+ str(showXML.findtext('tvdbid')) + " " \
|
||||
+ str(showXML.findtext('id')))
|
||||
+ str(showXML.findtext('title')) + " " \
|
||||
+ str(showXML.findtext('indexer')) + " " \
|
||||
+ str(showXML.findtext('tvdbid')) + " " \
|
||||
+ str(showXML.findtext('id')))
|
||||
return empty_return
|
||||
|
||||
name = showXML.findtext('title')
|
||||
indexer = showXML.findtext('indexer')
|
||||
|
||||
try:
|
||||
indexer = int(showXML.findtext('indexer'))
|
||||
except:
|
||||
indexer = None
|
||||
|
||||
if showXML.findtext('tvdbid') != None:
|
||||
indexer_id = int(showXML.findtext('tvdbid'))
|
||||
elif showXML.findtext('id') != None:
|
||||
|
@ -914,7 +944,9 @@ class GenericMetadata():
|
|||
return empty_return
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e), logger.WARNING)
|
||||
logger.log(
|
||||
u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
|
||||
logger.WARNING)
|
||||
return empty_return
|
||||
|
||||
return (indexer_id, name, indexer)
|
||||
|
@ -931,6 +963,7 @@ class GenericMetadata():
|
|||
|
||||
def size_str_to_int(x):
|
||||
return float("inf") if x == 'original' else int(x[1:])
|
||||
|
||||
max_size = max(sizes, key=size_str_to_int)
|
||||
|
||||
try:
|
||||
|
|
|
@ -21,7 +21,6 @@ from sickbeard import logger
|
|||
|
||||
|
||||
def getShowImage(url, imgNum=None):
|
||||
|
||||
image_data = None # @UnusedVariable
|
||||
|
||||
if url == None:
|
||||
|
|
|
@ -27,7 +27,6 @@ import generic
|
|||
from sickbeard import logger, exceptions, helpers
|
||||
from sickbeard import encodingKludge as ek
|
||||
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
from sickbeard.exceptions import ex
|
||||
|
||||
import xml.etree.cElementTree as etree
|
||||
|
@ -145,7 +144,8 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
|||
If no season folder exists, None is returned
|
||||
"""
|
||||
|
||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
|
||||
ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||
|
||||
season_dir_regex = '^Season\s+(\d+)$'
|
||||
|
||||
|
@ -184,7 +184,8 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
|||
If no season folder exists, None is returned
|
||||
"""
|
||||
|
||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
|
||||
ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||
|
||||
season_dir_regex = '^Season\s+(\d+)$'
|
||||
|
||||
|
@ -237,32 +238,36 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
|||
if show_obj.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
tv_node = etree.Element("Series")
|
||||
|
||||
try:
|
||||
myShow = t[int(show_obj.indexerid)]
|
||||
except indexer_exceptions.indexer_shownotfound:
|
||||
logger.log(u"Unable to find show with id " + str(show_obj.indexerid) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
|
||||
except sickbeard.indexer_shownotfound:
|
||||
logger.log(u"Unable to find show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + ", skipping it", logger.ERROR)
|
||||
raise
|
||||
|
||||
except indexer_exceptions.indexer_error:
|
||||
logger.log(u"" + show_obj.indexer + " is down, can't use its data to make the NFO", logger.ERROR)
|
||||
except sickbeard.indexer_error:
|
||||
logger.log(
|
||||
u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to make the NFO",
|
||||
logger.ERROR)
|
||||
raise
|
||||
|
||||
# check for title and id
|
||||
if getattr(myShow, 'seriesname', None) is None or getattr(myShow, 'id', None) is None:
|
||||
logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
|
||||
logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + ", skipping it", logger.ERROR)
|
||||
return False
|
||||
|
||||
indexerid = etree.SubElement(tv_node, "id")
|
||||
if getattr(myShow, 'id', None) is not None:
|
||||
indexerid.text = myShow['id']
|
||||
indexerid.text = str(myShow['id'])
|
||||
|
||||
indexer = etree.SubElement(tv_node, "indexer")
|
||||
if show_obj.indexer != None:
|
||||
indexer.text = show_obj.indexer
|
||||
indexer.text = str(show_obj.indexer)
|
||||
|
||||
SeriesName = etree.SubElement(tv_node, "SeriesName")
|
||||
if getattr(myShow, 'seriesname', None) is not None:
|
||||
|
@ -400,13 +405,14 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
|||
if ep_obj.show.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
myShow = t[ep_obj.show.indexerid]
|
||||
except indexer_exceptions.indexer_shownotfound, e:
|
||||
except sickbeard.indexer_shownotfound, e:
|
||||
raise exceptions.ShowNotFoundException(e.message)
|
||||
except indexer_exceptions.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
except sickbeard.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
rootNode = etree.Element("Item")
|
||||
|
@ -416,8 +422,10 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
|||
|
||||
try:
|
||||
myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
|
||||
except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?")
|
||||
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
|
||||
curEpToWrite.episode) + " on " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?")
|
||||
return None
|
||||
|
||||
if curEpToWrite == ep_obj:
|
||||
|
|
|
@ -30,9 +30,6 @@ from sickbeard import encodingKludge as ek
|
|||
from sickbeard.exceptions import ex
|
||||
|
||||
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
|
||||
|
||||
class TIVOMetadata(generic.GenericMetadata):
|
||||
"""
|
||||
Metadata generation class for TIVO
|
||||
|
@ -179,20 +176,23 @@ class TIVOMetadata(generic.GenericMetadata):
|
|||
if ep_obj.show.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
myShow = t[ep_obj.show.indexerid]
|
||||
except indexer_exceptions.indexer_shownotfound, e:
|
||||
except sickbeard.indexer_shownotfound, e:
|
||||
raise exceptions.ShowNotFoundException(str(e))
|
||||
except indexer_exceptions.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + str(e), logger.ERROR)
|
||||
except sickbeard.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + " while creating meta files - skipping - " + str(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
for curEpToWrite in eps_to_write:
|
||||
|
||||
try:
|
||||
myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
|
||||
except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + "... has it been removed? Should I delete from db?")
|
||||
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
|
||||
curEpToWrite.episode) + " on " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + "... has it been removed? Should I delete from db?")
|
||||
return None
|
||||
|
||||
if getattr(myEp, 'firstaired', None) is None and ep_obj.season == 0:
|
||||
|
@ -230,7 +230,8 @@ class TIVOMetadata(generic.GenericMetadata):
|
|||
# Replace double curly quotes
|
||||
sanitizedDescription = sanitizedDescription.replace(u"\u201c", "\"").replace(u"\u201d", "\"")
|
||||
# Replace single curly quotes
|
||||
sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u02BC", "'")
|
||||
sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(
|
||||
u"\u02BC", "'")
|
||||
|
||||
data += ("description : " + sanitizedDescription + "\n")
|
||||
|
||||
|
@ -277,15 +278,15 @@ class TIVOMetadata(generic.GenericMetadata):
|
|||
if genre:
|
||||
data += ("vProgramGenre : " + str(genre) + "\n")
|
||||
|
||||
# NOTE: The following are metadata keywords are not used
|
||||
# displayMajorNumber
|
||||
# showingBits
|
||||
# displayMinorNumber
|
||||
# colorCode
|
||||
# vSeriesGenre
|
||||
# vGuestStar, vDirector, vExecProducer, vProducer, vWriter, vHost, vChoreographer
|
||||
# partCount
|
||||
# partIndex
|
||||
# NOTE: The following are metadata keywords are not used
|
||||
# displayMajorNumber
|
||||
# showingBits
|
||||
# displayMinorNumber
|
||||
# colorCode
|
||||
# vSeriesGenre
|
||||
# vGuestStar, vDirector, vExecProducer, vProducer, vWriter, vHost, vChoreographer
|
||||
# partCount
|
||||
# partIndex
|
||||
|
||||
return data
|
||||
|
||||
|
@ -324,7 +325,8 @@ class TIVOMetadata(generic.GenericMetadata):
|
|||
helpers.chmodAsParent(nfo_file_path)
|
||||
|
||||
except EnvironmentError, e:
|
||||
logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR)
|
||||
logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
|
|
@ -27,7 +27,6 @@ import generic
|
|||
from sickbeard import logger, exceptions, helpers
|
||||
from sickbeard import encodingKludge as ek
|
||||
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
from sickbeard.exceptions import ex
|
||||
|
||||
import xml.etree.cElementTree as etree
|
||||
|
@ -135,7 +134,8 @@ class WDTVMetadata(generic.GenericMetadata):
|
|||
If no season folder exists, None is returned
|
||||
"""
|
||||
|
||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
|
||||
ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||
|
||||
season_dir_regex = '^Season\s+(\d+)$'
|
||||
|
||||
|
@ -187,12 +187,13 @@ class WDTVMetadata(generic.GenericMetadata):
|
|||
if ep_obj.show.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
myShow = t[ep_obj.show.indexerid]
|
||||
except indexer_exceptions.indexer_shownotfound, e:
|
||||
except sickbeard.indexer_shownotfound, e:
|
||||
raise exceptions.ShowNotFoundException(e.message)
|
||||
except indexer_exceptions.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
except sickbeard.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
rootNode = etree.Element("details")
|
||||
|
@ -202,8 +203,10 @@ class WDTVMetadata(generic.GenericMetadata):
|
|||
|
||||
try:
|
||||
myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
|
||||
except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + "... has it been removed? Should I delete from db?")
|
||||
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
|
||||
curEpToWrite.episode) + " on " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + "... has it been removed? Should I delete from db?")
|
||||
return None
|
||||
|
||||
if getattr(myEp, 'firstaired', None) is None and ep_obj.season == 0:
|
||||
|
|
|
@ -20,8 +20,6 @@ import datetime
|
|||
|
||||
import sickbeard
|
||||
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
|
||||
from sickbeard import logger, exceptions, helpers
|
||||
from sickbeard.exceptions import ex
|
||||
|
||||
|
@ -109,23 +107,27 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
|
|||
if show_obj.dvdorder != 0:
|
||||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
tv_node = etree.Element("tvshow")
|
||||
|
||||
try:
|
||||
myShow = t[int(show_ID)]
|
||||
except indexer_exceptions.indexer_shownotfound:
|
||||
logger.log(u"Unable to find show with id " + str(show_ID) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
|
||||
except sickbeard.indexer_shownotfound:
|
||||
logger.log(u"Unable to find show with id " + str(show_ID) + " on " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + ", skipping it", logger.ERROR)
|
||||
raise
|
||||
|
||||
except indexer_exceptions.indexer_error:
|
||||
logger.log(u"" + show_obj.indexer + " is down, can't use its data to add this show", logger.ERROR)
|
||||
except sickbeard.indexer_error:
|
||||
logger.log(
|
||||
u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to add this show",
|
||||
logger.ERROR)
|
||||
raise
|
||||
|
||||
# check for title and id
|
||||
if getattr(myShow, 'seriesname', None) is None or getattr(myShow, 'id', None) is None:
|
||||
logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
|
||||
logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + sickbeard.indexerApi(
|
||||
show_obj.indexer).name + ", skipping it", logger.ERROR)
|
||||
return False
|
||||
|
||||
title = etree.SubElement(tv_node, "title")
|
||||
|
@ -153,7 +155,7 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
|
|||
episodeguideurl = etree.SubElement(episodeguide, "url")
|
||||
episodeguideurl2 = etree.SubElement(tv_node, "episodeguideurl")
|
||||
if getattr(myShow, 'id', None) is not None:
|
||||
showurl = t.base_url + myShow["id"] + '/all/en.zip'
|
||||
showurl = sickbeard.indexerApi(show_obj.indexer).config['base_url'] + str(myShow["id"]) + '/all/en.zip'
|
||||
episodeguideurl.text = showurl
|
||||
episodeguideurl2.text = showurl
|
||||
|
||||
|
@ -163,11 +165,11 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
|
|||
|
||||
indexerid = etree.SubElement(tv_node, "id")
|
||||
if getattr(myShow, 'id', None) is not None:
|
||||
indexerid.text = myShow["id"]
|
||||
indexerid.text = str(myShow["id"])
|
||||
|
||||
indexer = etree.SubElement(tv_node, "indexer")
|
||||
if show_obj.indexer is not None:
|
||||
indexer.text = show_obj.indexer
|
||||
indexer.text = str(show_obj.indexer)
|
||||
|
||||
genre = etree.SubElement(tv_node, "genre")
|
||||
if getattr(myShow, 'genre', None) is not None:
|
||||
|
@ -230,12 +232,13 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
|
|||
lINDEXER_API_PARMS['dvdorder'] = True
|
||||
|
||||
try:
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
myShow = t[ep_obj.show.indexerid]
|
||||
except indexer_exceptions.indexer_shownotfound, e:
|
||||
except sickbeard.indexer_shownotfound, e:
|
||||
raise exceptions.ShowNotFoundException(e.message)
|
||||
except indexer_exceptions.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
except sickbeard.indexer_error, e:
|
||||
logger.log(u"Unable to connect to " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
|
||||
return
|
||||
|
||||
if len(eps_to_write) > 1:
|
||||
|
@ -248,8 +251,10 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
|
|||
|
||||
try:
|
||||
myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
|
||||
except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?")
|
||||
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
|
||||
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
|
||||
curEpToWrite.episode) + " on " + sickbeard.indexerApi(
|
||||
ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?")
|
||||
return None
|
||||
|
||||
if getattr(myEp, 'firstaired', None) is None:
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
from sickbeard import db
|
||||
from sickbeard.helpers import sanitizeSceneName
|
||||
|
||||
|
||||
def addNameToCache(name, indexer_id):
|
||||
"""
|
||||
Adds the show & tvdb id to the scene_names table in cache.db.
|
||||
|
@ -26,16 +27,17 @@ def addNameToCache(name, indexer_id):
|
|||
name: The show name to cache
|
||||
indexer_id: the TVDB and TVRAGE id that this show should be cached with (can be None/0 for unknown)
|
||||
"""
|
||||
|
||||
|
||||
# standardize the name we're using to account for small differences in providers
|
||||
name = sanitizeSceneName(name)
|
||||
|
||||
|
||||
if not indexer_id:
|
||||
indexer_id = 0
|
||||
|
||||
|
||||
cacheDB = db.DBConnection('cache.db')
|
||||
cacheDB.action("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])
|
||||
|
||||
|
||||
def retrieveNameFromCache(name):
|
||||
"""
|
||||
Looks up the given name in the scene_names table in cache.db.
|
||||
|
@ -44,18 +46,19 @@ def retrieveNameFromCache(name):
|
|||
|
||||
Returns: the TVDB and TVRAGE id that resulted from the cache lookup or None if the show wasn't found in the cache
|
||||
"""
|
||||
|
||||
|
||||
# standardize the name we're using to account for small differences in providers
|
||||
name = sanitizeSceneName(name)
|
||||
|
||||
|
||||
cacheDB = db.DBConnection('cache.db')
|
||||
cache_results = cacheDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
|
||||
|
||||
if not cache_results:
|
||||
return None
|
||||
|
||||
|
||||
return int(cache_results[0]["indexer_id"])
|
||||
|
||||
|
||||
def clearCache():
|
||||
"""
|
||||
Deletes all "unknown" entries from the cache (names with indexer_id of 0).
|
||||
|
|
|
@ -26,13 +26,12 @@ import calendar
|
|||
|
||||
from sickbeard import logger, classes
|
||||
from sickbeard import scene_numbering, scene_exceptions
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
from sickbeard.common import indexerStrings
|
||||
|
||||
from lib.dateutil.parser import parse
|
||||
|
||||
from time import strptime
|
||||
|
||||
|
||||
class NameParser(object):
|
||||
def __init__(self, file_name=True):
|
||||
|
||||
|
@ -54,14 +53,14 @@ class NameParser(object):
|
|||
|
||||
Stolen from dbr's tvnamer
|
||||
"""
|
||||
|
||||
|
||||
series_name = re.sub("(\D)\.(?!\s)(\D)", "\\1 \\2", series_name)
|
||||
series_name = re.sub("(\d)\.(\d{4})", "\\1 \\2", series_name) # if it ends in a year then don't keep the dot
|
||||
series_name = re.sub("(\d)\.(\d{4})", "\\1 \\2", series_name) # if it ends in a year then don't keep the dot
|
||||
series_name = re.sub("(\D)\.(?!\s)", "\\1 ", series_name)
|
||||
series_name = re.sub("\.(?!\s)(\D)", " \\1", series_name)
|
||||
series_name = series_name.replace("_", " ")
|
||||
series_name = re.sub("-$", "", series_name)
|
||||
series_name = re.sub("^\[.*\]", "", series_name)
|
||||
series_name = re.sub("^\[.*\]", "", series_name)
|
||||
return series_name.strip()
|
||||
|
||||
def _compile_regexes(self):
|
||||
|
@ -74,43 +73,43 @@ class NameParser(object):
|
|||
self.compiled_regexes.append((cur_pattern_name, cur_regex))
|
||||
|
||||
def _parse_string(self, name):
|
||||
|
||||
|
||||
if not name:
|
||||
return None
|
||||
|
||||
|
||||
for (cur_regex_name, cur_regex) in self.compiled_regexes:
|
||||
match = cur_regex.match(name)
|
||||
|
||||
if not match:
|
||||
continue
|
||||
|
||||
|
||||
result = ParseResult(name)
|
||||
result.which_regex = [cur_regex_name]
|
||||
|
||||
|
||||
named_groups = match.groupdict().keys()
|
||||
|
||||
if 'series_name' in named_groups:
|
||||
result.series_name = match.group('series_name')
|
||||
if result.series_name:
|
||||
result.series_name = self.clean_series_name(result.series_name)
|
||||
|
||||
|
||||
if 'season_num' in named_groups:
|
||||
tmp_season = int(match.group('season_num'))
|
||||
if cur_regex_name == 'bare' and tmp_season in (19,20):
|
||||
if cur_regex_name == 'bare' and tmp_season in (19, 20):
|
||||
continue
|
||||
result.season_number = tmp_season
|
||||
|
||||
|
||||
if 'ep_num' in named_groups:
|
||||
ep_num = self._convert_number(match.group('ep_num'))
|
||||
if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
|
||||
result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num'))+1)
|
||||
result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
|
||||
else:
|
||||
result.episode_numbers = [ep_num]
|
||||
|
||||
if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
|
||||
if 'scene_sports_date_format' in cur_regex_name:
|
||||
year = match.group('air_year')
|
||||
month = strptime(match.group('air_month')[:3],'%b').tm_mon
|
||||
month = strptime(match.group('air_month')[:3], '%b').tm_mon
|
||||
day = re.sub("(st|nd|rd|th)", "", match.group('air_day'))
|
||||
else:
|
||||
year = int(match.group('air_year'))
|
||||
|
@ -125,17 +124,18 @@ class NameParser(object):
|
|||
|
||||
if 'extra_info' in named_groups:
|
||||
tmp_extra_info = match.group('extra_info')
|
||||
|
||||
|
||||
# Show.S04.Special is almost certainly not every episode in the season
|
||||
if tmp_extra_info and cur_regex_name == 'season_only' and re.match(r'([. _-]|^)(special|extra)\w*([. _-]|$)', tmp_extra_info, re.I):
|
||||
if tmp_extra_info and cur_regex_name == 'season_only' and re.match(
|
||||
r'([. _-]|^)(special|extra)\w*([. _-]|$)', tmp_extra_info, re.I):
|
||||
continue
|
||||
result.extra_info = tmp_extra_info
|
||||
|
||||
|
||||
if 'release_group' in named_groups:
|
||||
result.release_group = match.group('release_group')
|
||||
|
||||
return result
|
||||
|
||||
|
||||
return None
|
||||
|
||||
def _combine_results(self, first, second, attr):
|
||||
|
@ -149,10 +149,10 @@ class NameParser(object):
|
|||
# if the second doesn't exist then return the first
|
||||
if not second:
|
||||
return getattr(first, attr)
|
||||
|
||||
|
||||
a = getattr(first, attr)
|
||||
b = getattr(second, attr)
|
||||
|
||||
|
||||
# if a is good use it
|
||||
if a != None or (type(a) == list and len(a)):
|
||||
return a
|
||||
|
@ -160,14 +160,14 @@ class NameParser(object):
|
|||
else:
|
||||
return b
|
||||
|
||||
def _unicodify(self, obj, encoding = "utf-8"):
|
||||
def _unicodify(self, obj, encoding="utf-8"):
|
||||
if isinstance(obj, basestring):
|
||||
if not isinstance(obj, unicode):
|
||||
obj = unicode(obj, encoding)
|
||||
return obj
|
||||
|
||||
def _convert_number(self, number):
|
||||
|
||||
|
||||
try:
|
||||
return int(number)
|
||||
except:
|
||||
|
@ -175,15 +175,15 @@ class NameParser(object):
|
|||
(1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
|
||||
('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
|
||||
)
|
||||
|
||||
n = unicode(number).upper()
|
||||
|
||||
|
||||
n = unicode(number).upper()
|
||||
|
||||
i = result = 0
|
||||
for integer, numeral in numeral_map:
|
||||
while n[i:i + len(numeral)] == numeral:
|
||||
result += integer
|
||||
i += len(numeral)
|
||||
|
||||
|
||||
return result
|
||||
|
||||
def parse(self, name, fix_scene_numbering=False):
|
||||
|
@ -253,47 +253,49 @@ class NameParser(object):
|
|||
return result_fixed
|
||||
|
||||
return final_result
|
||||
|
||||
|
||||
@classmethod
|
||||
def series_name_to_indexer_id(cls, series_name, check_scene_exceptions=True, check_database=True, check_indexer=False):
|
||||
def series_name_to_indexer_id(cls, series_name, check_scene_exceptions=True, check_database=True,
|
||||
check_indexer=False):
|
||||
"""
|
||||
Given a series name, return it's tvdbd_id.
|
||||
Returns None if not found.
|
||||
|
||||
This is mostly robbed from postProcessor._analyze_name
|
||||
"""
|
||||
|
||||
|
||||
# do a scene reverse-lookup to get a list of all possible names
|
||||
name_list = sickbeard.show_name_helpers.sceneToNormalShowNames(series_name)
|
||||
|
||||
|
||||
# for each possible interpretation of that scene name
|
||||
if check_scene_exceptions:
|
||||
for cur_name in name_list:
|
||||
logger.log(u"Checking scene exceptions for a match on "+cur_name, logger.DEBUG)
|
||||
logger.log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG)
|
||||
scene_id = sickbeard.scene_exceptions.get_scene_exception_by_name(cur_name)
|
||||
if scene_id: return scene_id
|
||||
|
||||
# see if we can find the name directly in the DB, if so use it
|
||||
if check_database:
|
||||
for cur_name in name_list:
|
||||
logger.log(u"Looking up "+cur_name+u" in the DB", logger.DEBUG)
|
||||
logger.log(u"Looking up " + str(cur_name) + " in the DB", logger.DEBUG)
|
||||
db_result = sickbeard.helpers.searchDBForShow(cur_name)
|
||||
if db_result: return db_result[1]
|
||||
|
||||
|
||||
# see if we can find the name with a TVDB lookup
|
||||
if check_indexer:
|
||||
for cur_name in name_list:
|
||||
for indexer in indexerStrings:
|
||||
for indexer in sickbeard.indexerApi().indexers:
|
||||
try:
|
||||
lINDEXER_API_PARMS = {'indexer': indexer}
|
||||
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
logger.log(u"Looking up name "+cur_name+u" on the Indexer", logger.DEBUG)
|
||||
logger.log(u"Looking up name " + str(cur_name) + " on " + sickbeard.indexerApi(indexer).name,
|
||||
logger.DEBUG)
|
||||
showObj = t[cur_name]
|
||||
except (indexer_exceptions):
|
||||
except (sickbeard.indexer_exception):
|
||||
# if none found, search on all languages
|
||||
try:
|
||||
lINDEXER_API_PARMS = {'indexer': indexer}
|
||||
|
@ -301,11 +303,13 @@ class NameParser(object):
|
|||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
lINDEXER_API_PARMS['search_all_languages'] = True
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
logger.log(u"Looking up name "+cur_name+u" in all languages on the Indexer", logger.DEBUG)
|
||||
logger.log(
|
||||
u"Looking up name " + str(cur_name) + " in all languages on " + sickbeard.indexerApi(
|
||||
indexer).name, logger.DEBUG)
|
||||
showObj = t[cur_name]
|
||||
except (indexer_exceptions.indexer_exception, IOError):
|
||||
except (sickbeard.indexer_exception, IOError):
|
||||
pass
|
||||
|
||||
continue
|
||||
|
@ -313,9 +317,10 @@ class NameParser(object):
|
|||
continue
|
||||
|
||||
return showObj["id"]
|
||||
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class ParseResult(object):
|
||||
def __init__(self,
|
||||
original_name,
|
||||
|
@ -325,10 +330,10 @@ class ParseResult(object):
|
|||
extra_info=None,
|
||||
release_group=None,
|
||||
air_date=None
|
||||
):
|
||||
):
|
||||
|
||||
self.original_name = original_name
|
||||
|
||||
|
||||
self.series_name = series_name
|
||||
self.season_number = season_number
|
||||
if not episode_numbers:
|
||||
|
@ -338,15 +343,15 @@ class ParseResult(object):
|
|||
|
||||
self.extra_info = extra_info
|
||||
self.release_group = release_group
|
||||
|
||||
|
||||
self.air_date = air_date
|
||||
|
||||
|
||||
self.which_regex = None
|
||||
|
||||
|
||||
def __eq__(self, other):
|
||||
if not other:
|
||||
return False
|
||||
|
||||
|
||||
if self.series_name != other.series_name:
|
||||
return False
|
||||
if self.season_number != other.season_number:
|
||||
|
@ -359,7 +364,7 @@ class ParseResult(object):
|
|||
return False
|
||||
if self.air_date != other.air_date:
|
||||
return False
|
||||
|
||||
|
||||
return True
|
||||
|
||||
def __str__(self):
|
||||
|
@ -368,10 +373,10 @@ class ParseResult(object):
|
|||
else:
|
||||
to_return = u''
|
||||
if self.season_number != None:
|
||||
to_return += 'S'+str(self.season_number)
|
||||
to_return += 'S' + str(self.season_number)
|
||||
if self.episode_numbers and len(self.episode_numbers):
|
||||
for e in self.episode_numbers:
|
||||
to_return += 'E'+str(e)
|
||||
to_return += 'E' + str(e)
|
||||
|
||||
if self.air_by_date:
|
||||
to_return += str(self.air_date)
|
||||
|
@ -381,7 +386,7 @@ class ParseResult(object):
|
|||
if self.release_group:
|
||||
to_return += ' (' + self.release_group + ')'
|
||||
|
||||
to_return += ' [ABD: '+str(self.air_by_date)+']'
|
||||
to_return += ' [ABD: ' + str(self.air_by_date) + ']'
|
||||
|
||||
return to_return.encode('utf-8')
|
||||
|
||||
|
@ -389,59 +394,61 @@ class ParseResult(object):
|
|||
if self.season_number == None and len(self.episode_numbers) == 0 and self.air_date:
|
||||
return True
|
||||
return False
|
||||
|
||||
air_by_date = property(_is_air_by_date)
|
||||
|
||||
|
||||
def fix_scene_numbering(self):
|
||||
"""
|
||||
The changes the parsed result (which is assumed to be scene numbering) to
|
||||
tvdb numbering, if necessary.
|
||||
"""
|
||||
if self.air_by_date: return self # scene numbering does not apply to air-by-date
|
||||
if self.season_number == None: return self # can't work without a season
|
||||
if len(self.episode_numbers) == 0: return self # need at least one episode
|
||||
|
||||
if self.air_by_date: return self # scene numbering does not apply to air-by-date
|
||||
if self.season_number == None: return self # can't work without a season
|
||||
if len(self.episode_numbers) == 0: return self # need at least one episode
|
||||
|
||||
indexer_id = NameParser.series_name_to_indexer_id(self.series_name, True, True, False)
|
||||
|
||||
|
||||
new_episode_numbers = []
|
||||
new_season_numbers = []
|
||||
for epNo in self.episode_numbers:
|
||||
(s, e) = scene_numbering.get_indexer_numbering(indexer_id, self.season_number, epNo)
|
||||
new_episode_numbers.append(e)
|
||||
new_season_numbers.append(s)
|
||||
|
||||
|
||||
# need to do a quick sanity check here. It's possible that we now have episodes
|
||||
# from more than one season (by tvdb numbering), and this is just too much
|
||||
# for sickbeard, so we'd need to flag it.
|
||||
new_season_numbers = list(set(new_season_numbers)) # remove duplicates
|
||||
new_season_numbers = list(set(new_season_numbers)) # remove duplicates
|
||||
if len(new_season_numbers) > 1:
|
||||
raise InvalidNameException("Scene numbering results episodes from "
|
||||
"seasons %s, (i.e. more than one) and "
|
||||
"sickbeard does not support this. "
|
||||
"Sorry." % (str(new_season_numbers)))
|
||||
|
||||
|
||||
# I guess it's possible that we'd have duplicate episodes too, so lets
|
||||
# eliminate them
|
||||
new_episode_numbers = list(set(new_episode_numbers))
|
||||
new_episode_numbers.sort()
|
||||
|
||||
|
||||
self.episode_numbers = new_episode_numbers
|
||||
self.season_number = new_season_numbers[0]
|
||||
|
||||
return self
|
||||
|
||||
|
||||
class NameParserCache(object):
|
||||
#TODO: check if the fifo list can beskiped and only use one dict
|
||||
_previous_parsed_list = [] # keep a fifo list of the cached items
|
||||
_previous_parsed_list = [] # keep a fifo list of the cached items
|
||||
_previous_parsed = {}
|
||||
_cache_size = 100
|
||||
|
||||
|
||||
def add(self, name, parse_result):
|
||||
self._previous_parsed[name] = parse_result
|
||||
self._previous_parsed_list.append(name)
|
||||
while len(self._previous_parsed_list) > self._cache_size:
|
||||
del_me = self._previous_parsed_list.pop(0)
|
||||
self._previous_parsed.pop(del_me)
|
||||
|
||||
|
||||
def get(self, name):
|
||||
if name in self._previous_parsed:
|
||||
logger.log("Using cached parse result for: " + name, logger.DEBUG)
|
||||
|
@ -449,7 +456,9 @@ class NameParserCache(object):
|
|||
else:
|
||||
return None
|
||||
|
||||
|
||||
name_parser_cache = NameParserCache()
|
||||
|
||||
|
||||
class InvalidNameException(Exception):
|
||||
"The given name is not valid"
|
||||
|
|
|
@ -19,181 +19,181 @@
|
|||
# all regexes are case insensitive
|
||||
|
||||
ep_regexes = [
|
||||
('standard_repeat',
|
||||
# Show.Name.S01E02.S01E03.Source.Quality.Etc-Group
|
||||
# Show Name - S01E02 - S01E03 - S01E04 - Ep Name
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
|
||||
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
|
||||
e(?P<ep_num>\d+) # E02 and separator
|
||||
([. _-]+s(?P=season_num)[. _-]* # S01 and optional separator
|
||||
e(?P<extra_ep_num>\d+))+ # E03/etc and separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('fov_repeat',
|
||||
# Show.Name.1x02.1x03.Source.Quality.Etc-Group
|
||||
# Show Name - 1x02 - 1x03 - 1x04 - Ep Name
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
|
||||
(?P<season_num>\d+)x # 1x
|
||||
(?P<ep_num>\d+) # 02 and separator
|
||||
([. _-]+(?P=season_num)x # 1x
|
||||
(?P<extra_ep_num>\d+))+ # 03/etc and separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('standard',
|
||||
# Show.Name.S01E02.Source.Quality.Etc-Group
|
||||
# Show Name - S01E02 - My Ep Name
|
||||
# Show.Name.S01.E03.My.Ep.Name
|
||||
# Show.Name.S01E02E03.Source.Quality.Etc-Group
|
||||
# Show Name - S01E02-03 - My Ep Name
|
||||
# Show.Name.S01.E02.E03
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
|
||||
e(?P<ep_num>\d+) # E02 and separator
|
||||
(([. _-]*e|-) # linking e/- char
|
||||
(?P<extra_ep_num>(?!(1080|720)[pi])\d+))* # additional E03/etc
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
('standard_repeat',
|
||||
# Show.Name.S01E02.S01E03.Source.Quality.Etc-Group
|
||||
# Show Name - S01E02 - S01E03 - S01E04 - Ep Name
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
|
||||
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
|
||||
e(?P<ep_num>\d+) # E02 and separator
|
||||
([. _-]+s(?P=season_num)[. _-]* # S01 and optional separator
|
||||
e(?P<extra_ep_num>\d+))+ # E03/etc and separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('fov',
|
||||
# Show_Name.1x02.Source_Quality_Etc-Group
|
||||
# Show Name - 1x02 - My Ep Name
|
||||
# Show_Name.1x02x03x04.Source_Quality_Etc-Group
|
||||
# Show Name - 1x02-03-04 - My Ep Name
|
||||
'''
|
||||
^((?P<series_name>.+?)[\[. _-]+)? # Show_Name and separator
|
||||
(?P<season_num>\d+)x # 1x
|
||||
(?P<ep_num>\d+) # 02 and separator
|
||||
(([. _-]*x|-) # linking x/- char
|
||||
(?P<extra_ep_num>
|
||||
(?!(1080|720)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps
|
||||
\d+))* # additional x03/etc
|
||||
[\]. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('scene_date_format',
|
||||
# Show.Name.2010.11.23.Source.Quality.Etc-Group
|
||||
# Show Name - 2010-11-23 - Ep Name
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
(?P<air_year>\d{4})[. _-]+ # 2010 and separator
|
||||
(?P<air_month>\d{2})[. _-]+ # 11 and separator
|
||||
(?P<air_day>\d{2}) # 23 and separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
('fov_repeat',
|
||||
# Show.Name.1x02.1x03.Source.Quality.Etc-Group
|
||||
# Show Name - 1x02 - 1x03 - 1x04 - Ep Name
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
|
||||
(?P<season_num>\d+)x # 1x
|
||||
(?P<ep_num>\d+) # 02 and separator
|
||||
([. _-]+(?P=season_num)x # 1x
|
||||
(?P<extra_ep_num>\d+))+ # 03/etc and separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('scene_sports_date_format',
|
||||
# Show.Name.2010.Nov.23rd.Source.Quality.Etc-Group
|
||||
# Show Name - 2010-Nov-23rd - Ep Name
|
||||
'''
|
||||
^(?P<series_name>.*?(UEFA|MLB|ESPN|WWE|MMA|UFC|TNA|EPL|NASCAR|NBA|NFL|NHL|NRL|PGA|SUPER LEAGUE|FORMULA|FIFA|NETBALL|MOTOGP).*?)
|
||||
(?P<air_day>\d{1,2}[a-zA-Z]{2})[. _-]+ # 23rd and seperator
|
||||
(?P<air_month>[a-zA-Z]{3,})[. _-]+ # Nov and seperator
|
||||
(?P<air_year>\d{4})[. _-]+ # 2010
|
||||
(?P<extra_info>.*?(?<![. _-])(?<!WEB))[. _-]+ # Make sure this is really the release group
|
||||
(?P<release_group>.*?)$ # Group
|
||||
'''),
|
||||
('standard',
|
||||
# Show.Name.S01E02.Source.Quality.Etc-Group
|
||||
# Show Name - S01E02 - My Ep Name
|
||||
# Show.Name.S01.E03.My.Ep.Name
|
||||
# Show.Name.S01E02E03.Source.Quality.Etc-Group
|
||||
# Show Name - S01E02-03 - My Ep Name
|
||||
# Show.Name.S01.E02.E03
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
|
||||
e(?P<ep_num>\d+) # E02 and separator
|
||||
(([. _-]*e|-) # linking e/- char
|
||||
(?P<extra_ep_num>(?!(1080|720)[pi])\d+))* # additional E03/etc
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('stupid',
|
||||
# tpz-abc102
|
||||
'''
|
||||
(?P<release_group>.+?)-\w+?[\. ]? # tpz-abc
|
||||
(?!264) # don't count x264
|
||||
(?P<season_num>\d{1,2}) # 1
|
||||
(?P<ep_num>\d{2})$ # 02
|
||||
'''),
|
||||
|
||||
('verbose',
|
||||
# Show Name Season 1 Episode 2 Ep Name
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show Name and separator
|
||||
season[. _-]+ # season and separator
|
||||
(?P<season_num>\d+)[. _-]+ # 1
|
||||
episode[. _-]+ # episode and separator
|
||||
(?P<ep_num>\d+)[. _-]+ # 02 and separator
|
||||
(?P<extra_info>.+)$ # Source_Quality_Etc-
|
||||
'''),
|
||||
|
||||
('season_only',
|
||||
# Show.Name.S01.Source.Quality.Etc-Group
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
s(eason[. _-])? # S01/Season 01
|
||||
(?P<season_num>\d+)[. _-]* # S01 and optional separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
('fov',
|
||||
# Show_Name.1x02.Source_Quality_Etc-Group
|
||||
# Show Name - 1x02 - My Ep Name
|
||||
# Show_Name.1x02x03x04.Source_Quality_Etc-Group
|
||||
# Show Name - 1x02-03-04 - My Ep Name
|
||||
'''
|
||||
^((?P<series_name>.+?)[\[. _-]+)? # Show_Name and separator
|
||||
(?P<season_num>\d+)x # 1x
|
||||
(?P<ep_num>\d+) # 02 and separator
|
||||
(([. _-]*x|-) # linking x/- char
|
||||
(?P<extra_ep_num>
|
||||
(?!(1080|720)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps
|
||||
\d+))* # additional x03/etc
|
||||
[\]. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('no_season_multi_ep',
|
||||
# Show.Name.E02-03
|
||||
# Show.Name.E02.2010
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
(e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part
|
||||
(?P<ep_num>(\d+|[ivx]+)) # first ep num
|
||||
((([. _-]+(and|&|to)[. _-]+)|-) # and/&/to joiner
|
||||
(?P<extra_ep_num>(?!(1080|720)[pi])(\d+|[ivx]+))[. _-]) # second ep num
|
||||
([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
('scene_date_format',
|
||||
# Show.Name.2010.11.23.Source.Quality.Etc-Group
|
||||
# Show Name - 2010-11-23 - Ep Name
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
(?P<air_year>\d{4})[. _-]+ # 2010 and separator
|
||||
(?P<air_month>\d{2})[. _-]+ # 11 and separator
|
||||
(?P<air_day>\d{2}) # 23 and separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('no_season_general',
|
||||
# Show.Name.E23.Test
|
||||
# Show.Name.Part.3.Source.Quality.Etc-Group
|
||||
# Show.Name.Part.1.and.Part.2.Blah-Group
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
(e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part
|
||||
(?P<ep_num>(\d+|([ivx]+(?=[. _-])))) # first ep num
|
||||
([. _-]+((and|&|to)[. _-]+)? # and/&/to joiner
|
||||
((e(p(isode)?)?|part|pt)[. _-]?) # e, ep, episode, or part
|
||||
(?P<extra_ep_num>(?!(1080|720)[pi])
|
||||
(\d+|([ivx]+(?=[. _-]))))[. _-])* # second ep num
|
||||
([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
('scene_sports_date_format',
|
||||
# Show.Name.2010.Nov.23rd.Source.Quality.Etc-Group
|
||||
# Show Name - 2010-Nov-23rd - Ep Name
|
||||
'''
|
||||
^(?P<series_name>.*?(UEFA|MLB|ESPN|WWE|MMA|UFC|TNA|EPL|NASCAR|NBA|NFL|NHL|NRL|PGA|SUPER LEAGUE|FORMULA|FIFA|NETBALL|MOTOGP).*?)
|
||||
(?P<air_day>\d{1,2}[a-zA-Z]{2})[. _-]+ # 23rd and seperator
|
||||
(?P<air_month>[a-zA-Z]{3,})[. _-]+ # Nov and seperator
|
||||
(?P<air_year>\d{4})[. _-]+ # 2010
|
||||
(?P<extra_info>.*?(?<![. _-])(?<!WEB))[. _-]+ # Make sure this is really the release group
|
||||
(?P<release_group>.*?)$ # Group
|
||||
'''),
|
||||
|
||||
('bare',
|
||||
# Show.Name.102.Source.Quality.Etc-Group
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
|
||||
(?P<season_num>\d{1,2}) # 1
|
||||
(?P<ep_num>\d{2}) # 02 and separator
|
||||
([. _-]+(?P<extra_info>(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc-
|
||||
(-(?P<release_group>.+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('no_season',
|
||||
# Show Name - 01 - Ep Name
|
||||
# 01 - Ep Name
|
||||
# 01 - Ep Name
|
||||
'''
|
||||
^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator
|
||||
(?P<ep_num>\d{1,2}) # 02
|
||||
(?:-(?P<extra_ep_num>\d{1,2}))* # 02
|
||||
[. _-]+((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
]
|
||||
('stupid',
|
||||
# tpz-abc102
|
||||
'''
|
||||
(?P<release_group>.+?)-\w+?[\. ]? # tpz-abc
|
||||
(?!264) # don't count x264
|
||||
(?P<season_num>\d{1,2}) # 1
|
||||
(?P<ep_num>\d{2})$ # 02
|
||||
'''),
|
||||
|
||||
('verbose',
|
||||
# Show Name Season 1 Episode 2 Ep Name
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show Name and separator
|
||||
season[. _-]+ # season and separator
|
||||
(?P<season_num>\d+)[. _-]+ # 1
|
||||
episode[. _-]+ # episode and separator
|
||||
(?P<ep_num>\d+)[. _-]+ # 02 and separator
|
||||
(?P<extra_info>.+)$ # Source_Quality_Etc-
|
||||
'''),
|
||||
|
||||
('season_only',
|
||||
# Show.Name.S01.Source.Quality.Etc-Group
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
s(eason[. _-])? # S01/Season 01
|
||||
(?P<season_num>\d+)[. _-]* # S01 and optional separator
|
||||
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
|
||||
('no_season_multi_ep',
|
||||
# Show.Name.E02-03
|
||||
# Show.Name.E02.2010
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
(e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part
|
||||
(?P<ep_num>(\d+|[ivx]+)) # first ep num
|
||||
((([. _-]+(and|&|to)[. _-]+)|-) # and/&/to joiner
|
||||
(?P<extra_ep_num>(?!(1080|720)[pi])(\d+|[ivx]+))[. _-]) # second ep num
|
||||
([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
|
||||
('no_season_general',
|
||||
# Show.Name.E23.Test
|
||||
# Show.Name.Part.3.Source.Quality.Etc-Group
|
||||
# Show.Name.Part.1.and.Part.2.Blah-Group
|
||||
'''
|
||||
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
|
||||
(e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part
|
||||
(?P<ep_num>(\d+|([ivx]+(?=[. _-])))) # first ep num
|
||||
([. _-]+((and|&|to)[. _-]+)? # and/&/to joiner
|
||||
((e(p(isode)?)?|part|pt)[. _-]?) # e, ep, episode, or part
|
||||
(?P<extra_ep_num>(?!(1080|720)[pi])
|
||||
(\d+|([ivx]+(?=[. _-]))))[. _-])* # second ep num
|
||||
([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
|
||||
('bare',
|
||||
# Show.Name.102.Source.Quality.Etc-Group
|
||||
'''
|
||||
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
|
||||
(?P<season_num>\d{1,2}) # 1
|
||||
(?P<ep_num>\d{2}) # 02 and separator
|
||||
([. _-]+(?P<extra_info>(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc-
|
||||
(-(?P<release_group>.+))?)?$ # Group
|
||||
'''),
|
||||
|
||||
('no_season',
|
||||
# Show Name - 01 - Ep Name
|
||||
# 01 - Ep Name
|
||||
# 01 - Ep Name
|
||||
'''
|
||||
^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator
|
||||
(?P<ep_num>\d{1,2}) # 02
|
||||
(?:-(?P<extra_ep_num>\d{1,2}))* # 02
|
||||
[. _-]+((?P<extra_info>.+?) # Source_Quality_Etc-
|
||||
((?<![. _-])(?<!WEB) # Make sure this is really the release group
|
||||
-(?P<release_group>[^- ]+))?)?$ # Group
|
||||
'''
|
||||
),
|
||||
]
|
||||
|
|
|
@ -33,12 +33,13 @@ name_presets = ('%SN - %Sx%0E - %EN',
|
|||
'%Sx%0E - %EN',
|
||||
'S%0SE%0E - %EN',
|
||||
'Season %0S/%S.N.S%0SE%0E.%Q.N-%RG'
|
||||
)
|
||||
)
|
||||
|
||||
name_abd_presets = ('%SN - %A-D - %EN',
|
||||
'%S.N.%A.D.%E.N.%Q.N',
|
||||
'%Y/%0M/%S.N.%A.D.%E.N-%RG'
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class TVShow():
|
||||
def __init__(self):
|
||||
|
@ -46,6 +47,7 @@ class TVShow():
|
|||
self.genre = "Comedy"
|
||||
self.air_by_date = 0
|
||||
|
||||
|
||||
class TVEpisode(tv.TVEpisode):
|
||||
def __init__(self, season, episode, name):
|
||||
self.relatedEps = []
|
||||
|
@ -58,6 +60,7 @@ class TVEpisode(tv.TVEpisode):
|
|||
self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
|
||||
self._is_proper = True
|
||||
|
||||
|
||||
def check_force_season_folders(pattern=None, multi=None):
|
||||
"""
|
||||
Checks if the name can still be parsed if you strip off the folders to determine if we need to force season folders
|
||||
|
@ -67,14 +70,15 @@ def check_force_season_folders(pattern=None, multi=None):
|
|||
"""
|
||||
if pattern == None:
|
||||
pattern = sickbeard.NAMING_PATTERN
|
||||
|
||||
valid = not validate_name(pattern, None, file_only=True)
|
||||
|
||||
|
||||
valid = not validate_name(pattern, None, file_only=True)
|
||||
|
||||
if multi != None:
|
||||
valid = valid or not validate_name(pattern, multi, file_only=True)
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
def check_valid_naming(pattern=None, multi=None):
|
||||
"""
|
||||
Checks if the name is can be parsed back to its original form for both single and multi episodes.
|
||||
|
@ -83,16 +87,17 @@ def check_valid_naming(pattern=None, multi=None):
|
|||
"""
|
||||
if pattern == None:
|
||||
pattern = sickbeard.NAMING_PATTERN
|
||||
|
||||
logger.log(u"Checking whether the pattern "+pattern+" is valid for a single episode", logger.DEBUG)
|
||||
|
||||
logger.log(u"Checking whether the pattern " + pattern + " is valid for a single episode", logger.DEBUG)
|
||||
valid = validate_name(pattern, None)
|
||||
|
||||
if multi != None:
|
||||
logger.log(u"Checking whether the pattern "+pattern+" is valid for a multi episode", logger.DEBUG)
|
||||
logger.log(u"Checking whether the pattern " + pattern + " is valid for a multi episode", logger.DEBUG)
|
||||
valid = valid and validate_name(pattern, multi)
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
def check_valid_abd_naming(pattern=None):
|
||||
"""
|
||||
Checks if the name is can be parsed back to its original form for an air-by-date format.
|
||||
|
@ -101,8 +106,8 @@ def check_valid_abd_naming(pattern=None):
|
|||
"""
|
||||
if pattern == None:
|
||||
pattern = sickbeard.NAMING_PATTERN
|
||||
|
||||
logger.log(u"Checking whether the pattern "+pattern+" is valid for an air-by-date episode", logger.DEBUG)
|
||||
|
||||
logger.log(u"Checking whether the pattern " + pattern + " is valid for an air-by-date episode", logger.DEBUG)
|
||||
valid = validate_name(pattern, abd=True)
|
||||
|
||||
return valid
|
||||
|
@ -119,18 +124,18 @@ def validate_name(pattern, multi=None, file_only=False, abd=False):
|
|||
new_name = ek.ek(os.path.join, new_path, new_name)
|
||||
|
||||
if not new_name:
|
||||
logger.log(u"Unable to create a name out of "+pattern, logger.DEBUG)
|
||||
logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
|
||||
return False
|
||||
|
||||
logger.log(u"Trying to parse "+new_name, logger.DEBUG)
|
||||
logger.log(u"Trying to parse " + new_name, logger.DEBUG)
|
||||
|
||||
try:
|
||||
result = parser.parse(new_name)
|
||||
except InvalidNameException:
|
||||
logger.log(u"Unable to parse "+new_name+", not valid", logger.DEBUG)
|
||||
logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
|
||||
return False
|
||||
|
||||
logger.log("The name "+new_name + " parsed into " + str(result), logger.DEBUG)
|
||||
|
||||
logger.log("The name " + new_name + " parsed into " + str(result), logger.DEBUG)
|
||||
|
||||
if abd:
|
||||
if result.air_date != ep.airdate:
|
||||
|
@ -146,9 +151,10 @@ def validate_name(pattern, multi=None, file_only=False, abd=False):
|
|||
|
||||
return True
|
||||
|
||||
|
||||
def _generate_sample_ep(multi=None, abd=False):
|
||||
# make a fake episode object
|
||||
ep = TVEpisode(2,3,"Ep Name")
|
||||
ep = TVEpisode(2, 3, "Ep Name")
|
||||
ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
|
||||
ep._airdate = datetime.date(2011, 3, 9)
|
||||
if abd:
|
||||
|
@ -160,11 +166,11 @@ def _generate_sample_ep(multi=None, abd=False):
|
|||
ep._name = "Ep Name (1)"
|
||||
ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'
|
||||
|
||||
secondEp = TVEpisode(2,4,"Ep Name (2)")
|
||||
secondEp = TVEpisode(2, 4, "Ep Name (2)")
|
||||
secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
|
||||
secondEp._release_name = ep._release_name
|
||||
|
||||
thirdEp = TVEpisode(2,5,"Ep Name (3)")
|
||||
thirdEp = TVEpisode(2, 5, "Ep Name (3)")
|
||||
thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
|
||||
thirdEp._release_name = ep._release_name
|
||||
|
||||
|
@ -173,8 +179,8 @@ def _generate_sample_ep(multi=None, abd=False):
|
|||
|
||||
return ep
|
||||
|
||||
def test_name(pattern, multi=None, abd=False):
|
||||
|
||||
def test_name(pattern, multi=None, abd=False):
|
||||
ep = _generate_sample_ep(multi, abd)
|
||||
|
||||
return {'name': ep.formatted_filename(pattern, multi), 'dir': ep.formatted_dir(pattern, multi)}
|
|
@ -39,33 +39,35 @@ sb_timezone = tz.tzlocal()
|
|||
# helper to remove failed temp download
|
||||
def _remove_zoneinfo_failed(filename):
|
||||
try:
|
||||
ek.ek(os.remove,filename)
|
||||
ek.ek(os.remove, filename)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
# helper to remove old unneeded zoneinfo files
|
||||
def _remove_old_zoneinfo():
|
||||
if (lib.dateutil.zoneinfo.ZONEINFOFILE is not None):
|
||||
cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
|
||||
else:
|
||||
return
|
||||
|
||||
cur_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
|
||||
|
||||
for (path, dirs, files) in ek.ek(os.walk,helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
|
||||
|
||||
cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
|
||||
|
||||
for (path, dirs, files) in ek.ek(os.walk,
|
||||
helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
|
||||
for filename in files:
|
||||
if filename.endswith('.tar.gz'):
|
||||
file_w_path = ek.ek(join,path,filename)
|
||||
if file_w_path != cur_file and ek.ek(isfile,file_w_path):
|
||||
file_w_path = ek.ek(join, path, filename)
|
||||
if file_w_path != cur_file and ek.ek(isfile, file_w_path):
|
||||
try:
|
||||
ek.ek(os.remove,file_w_path)
|
||||
ek.ek(os.remove, file_w_path)
|
||||
logger.log(u"Delete unneeded old zoneinfo File: " + file_w_path)
|
||||
except:
|
||||
logger.log(u"Unable to delete: " + file_w_path,logger.ERROR)
|
||||
logger.log(u"Unable to delete: " + file_w_path, logger.ERROR)
|
||||
|
||||
|
||||
# update the dateutil zoneinfo
|
||||
def _update_zoneinfo():
|
||||
|
||||
global sb_timezone
|
||||
sb_timezone = tz.tzlocal()
|
||||
|
||||
|
@ -90,36 +92,37 @@ def _update_zoneinfo():
|
|||
|
||||
# now load the new zoneinfo
|
||||
url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo
|
||||
|
||||
zonefile = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
|
||||
zonefile_tmp = re.sub(r"\.tar\.gz$",'.tmp', zonefile)
|
||||
|
||||
if (ek.ek(os.path.exists,zonefile_tmp)):
|
||||
zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
|
||||
zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)
|
||||
|
||||
if (ek.ek(os.path.exists, zonefile_tmp)):
|
||||
try:
|
||||
ek.ek(os.remove,zonefile_tmp)
|
||||
ek.ek(os.remove, zonefile_tmp)
|
||||
except:
|
||||
logger.log(u"Unable to delete: " + zonefile_tmp,logger.ERROR)
|
||||
logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
|
||||
return
|
||||
|
||||
if not helpers.download_file(url_tar, zonefile_tmp):
|
||||
return
|
||||
|
||||
if not ek.ek(os.path.exists,zonefile_tmp):
|
||||
logger.log(u"Download of " + zonefile_tmp + " failed.",logger.ERROR)
|
||||
if not ek.ek(os.path.exists, zonefile_tmp):
|
||||
logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
|
||||
return
|
||||
|
||||
new_hash = str(helpers.md5_for_file(zonefile_tmp))
|
||||
|
||||
if (zoneinfo_md5.upper() == new_hash.upper()):
|
||||
logger.log(u"Updating timezone info with new one: " + new_zoneinfo,logger.MESSAGE)
|
||||
logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
|
||||
try:
|
||||
# remove the old zoneinfo file
|
||||
if (cur_zoneinfo is not None):
|
||||
old_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
|
||||
if (ek.ek(os.path.exists,old_file)):
|
||||
ek.ek(os.remove,old_file)
|
||||
old_file = helpers.real_path(
|
||||
ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
|
||||
if (ek.ek(os.path.exists, old_file)):
|
||||
ek.ek(os.remove, old_file)
|
||||
# rename downloaded file
|
||||
ek.ek(os.rename,zonefile_tmp,zonefile)
|
||||
ek.ek(os.rename, zonefile_tmp, zonefile)
|
||||
# load the new zoneinfo
|
||||
reload(lib.dateutil.zoneinfo)
|
||||
sb_timezone = tz.tzlocal()
|
||||
|
@ -128,12 +131,12 @@ def _update_zoneinfo():
|
|||
return
|
||||
else:
|
||||
_remove_zoneinfo_failed(zonefile_tmp)
|
||||
logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(),logger.ERROR)
|
||||
logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
|
||||
return
|
||||
|
||||
|
||||
# update the network timezone table
|
||||
def update_network_dict():
|
||||
|
||||
_remove_old_zoneinfo()
|
||||
_update_zoneinfo()
|
||||
|
||||
|
@ -152,10 +155,10 @@ def update_network_dict():
|
|||
|
||||
try:
|
||||
for line in url_data.splitlines():
|
||||
(key, val) = line.decode('utf-8').strip().rsplit(u':',1)
|
||||
if key is None or val is None:
|
||||
continue
|
||||
d[key] = val
|
||||
(key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
|
||||
if key is None or val is None:
|
||||
continue
|
||||
d[key] = val
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
|
||||
|
@ -169,7 +172,8 @@ def update_network_dict():
|
|||
h_k = old_d.has_key(cur_d)
|
||||
if h_k and cur_t != old_d[cur_d]:
|
||||
# update old record
|
||||
ql.append(["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
|
||||
ql.append(
|
||||
["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
|
||||
elif not h_k:
|
||||
# add new record
|
||||
ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
|
||||
|
@ -178,12 +182,13 @@ def update_network_dict():
|
|||
# remove deleted records
|
||||
if len(old_d) > 0:
|
||||
L = list(va for va in old_d)
|
||||
ql.append(["DELETE FROM network_timezones WHERE network_name IN ("+','.join(['?'] * len(L))+")", L])
|
||||
ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])
|
||||
# change all network timezone infos at once (much faster)
|
||||
if len(ql) > 0:
|
||||
myDB.mass_action(ql)
|
||||
load_network_dict()
|
||||
|
||||
|
||||
# load network timezones from db into dict
|
||||
def load_network_dict():
|
||||
d = {}
|
||||
|
@ -199,6 +204,7 @@ def load_network_dict():
|
|||
global network_dict
|
||||
network_dict = d
|
||||
|
||||
|
||||
# get timezone of a network or return default timezone
|
||||
def get_network_timezone(network, network_dict):
|
||||
if network is None:
|
||||
|
@ -206,7 +212,7 @@ def get_network_timezone(network, network_dict):
|
|||
|
||||
try:
|
||||
if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
|
||||
n_t = tz.gettz(network_dict[network])
|
||||
n_t = tz.gettz(network_dict[network])
|
||||
if n_t is not None:
|
||||
return n_t
|
||||
else:
|
||||
|
@ -216,6 +222,7 @@ def get_network_timezone(network, network_dict):
|
|||
except:
|
||||
return sb_timezone
|
||||
|
||||
|
||||
# parse date and time string into local time
|
||||
def parse_date_time(d, t, network):
|
||||
if network_dict is None:
|
||||
|
@ -257,6 +264,7 @@ def parse_date_time(d, t, network):
|
|||
except (ValueError):
|
||||
return foreign_naive
|
||||
|
||||
|
||||
def test_timeformat(t):
|
||||
mo = time_regex.search(t)
|
||||
if mo is None or len(mo.groups()) < 2:
|
||||
|
|
|
@ -64,7 +64,7 @@ trakt_notifier = trakt.TraktNotifier()
|
|||
email_notifier = emailnotify.EmailNotifier()
|
||||
|
||||
notifiers = [
|
||||
libnotify_notifier, # Libnotify notifier goes first because it doesn't involve blocking on network activity.
|
||||
libnotify_notifier, # Libnotify notifier goes first because it doesn't involve blocking on network activity.
|
||||
xbmc_notifier,
|
||||
plex_notifier,
|
||||
nmj_notifier,
|
||||
|
@ -89,10 +89,12 @@ def notify_download(ep_name):
|
|||
for n in notifiers:
|
||||
n.notify_download(ep_name)
|
||||
|
||||
|
||||
def notify_subtitle_download(ep_name, lang):
|
||||
for n in notifiers:
|
||||
n.notify_subtitle_download(ep_name, lang)
|
||||
|
||||
|
||||
def notify_snatch(ep_name):
|
||||
for n in notifiers:
|
||||
n.notify_snatch(ep_name)
|
||||
|
|
|
@ -28,8 +28,8 @@ from sickbeard.exceptions import ex
|
|||
|
||||
API_URL = "https://boxcar.io/devices/providers/fWc4sgSmpcN6JujtBmR6/notifications"
|
||||
|
||||
class BoxcarNotifier:
|
||||
|
||||
class BoxcarNotifier:
|
||||
def test_notify(self, email, title="Test"):
|
||||
return self._sendBoxcar("This is a test notification from SickBeard", title, email)
|
||||
|
||||
|
@ -44,7 +44,7 @@ class BoxcarNotifier:
|
|||
|
||||
returns: True if the message succeeded, False otherwise
|
||||
"""
|
||||
|
||||
|
||||
# build up the URL and parameters
|
||||
msg = msg.strip()
|
||||
curUrl = API_URL
|
||||
|
@ -53,7 +53,7 @@ class BoxcarNotifier:
|
|||
if subscribe:
|
||||
data = urllib.urlencode({'email': email})
|
||||
curUrl = curUrl + "/subscribe"
|
||||
|
||||
|
||||
# for normal requests we need all these parameters
|
||||
else:
|
||||
data = urllib.urlencode({
|
||||
|
@ -61,7 +61,7 @@ class BoxcarNotifier:
|
|||
'notification[from_screen_name]': title,
|
||||
'notification[message]': msg.encode('utf-8'),
|
||||
'notification[from_remote_service_id]': int(time.time())
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
# send the request to boxcar
|
||||
|
@ -69,7 +69,7 @@ class BoxcarNotifier:
|
|||
req = urllib2.Request(curUrl)
|
||||
handle = urllib2.urlopen(req, data)
|
||||
handle.close()
|
||||
|
||||
|
||||
except urllib2.URLError, e:
|
||||
# if we get an error back that doesn't have an error code then who knows what's really happening
|
||||
if not hasattr(e, 'code'):
|
||||
|
@ -82,16 +82,16 @@ class BoxcarNotifier:
|
|||
if e.code == 404:
|
||||
logger.log("Username is wrong/not a boxcar email. Boxcar will send an email to it", logger.WARNING)
|
||||
return False
|
||||
|
||||
|
||||
# For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service.
|
||||
elif e.code == 401:
|
||||
|
||||
|
||||
# If the user has already added your service, we'll return an HTTP status code of 401.
|
||||
if subscribe:
|
||||
logger.log("Already subscribed to service", logger.ERROR)
|
||||
# i dont know if this is true or false ... its neither but i also dont know how we got here in the first place
|
||||
return False
|
||||
|
||||
|
||||
#HTTP status 401 if the user doesn't have the service added
|
||||
else:
|
||||
subscribeNote = self._sendBoxcar(msg, title, email, True)
|
||||
|
@ -101,7 +101,7 @@ class BoxcarNotifier:
|
|||
else:
|
||||
logger.log("Subscription could not be send", logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
# If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
|
||||
elif e.code == 400:
|
||||
logger.log("Wrong data send to boxcar", logger.ERROR)
|
||||
|
@ -113,7 +113,7 @@ class BoxcarNotifier:
|
|||
def notify_snatch(self, ep_name, title=notifyStrings[NOTIFY_SNATCH]):
|
||||
if sickbeard.BOXCAR_NOTIFY_ONSNATCH:
|
||||
self._notifyBoxcar(title, ep_name)
|
||||
|
||||
|
||||
|
||||
def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]):
|
||||
if sickbeard.BOXCAR_NOTIFY_ONDOWNLOAD:
|
||||
|
@ -146,4 +146,5 @@ class BoxcarNotifier:
|
|||
self._sendBoxcar(message, title, username)
|
||||
return True
|
||||
|
||||
|
||||
notifier = BoxcarNotifier
|
||||
|
|
|
@ -31,10 +31,11 @@ from sickbeard import logger
|
|||
from sickbeard import db
|
||||
from sickbeard.exceptions import ex
|
||||
|
||||
|
||||
class EmailNotifier:
|
||||
def __init__(self):
|
||||
self.last_err = None
|
||||
|
||||
|
||||
def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to):
|
||||
msg = MIMEText('This is a test message from Sick Beard. If you\'re reading this, the test succeeded.')
|
||||
msg['Subject'] = 'Sick Beard: Test Message'
|
||||
|
@ -55,19 +56,25 @@ class EmailNotifier:
|
|||
if len(to) == 0:
|
||||
logger.log('Skipping email notify because there are no configured recepients', logger.WARNING)
|
||||
else:
|
||||
try:
|
||||
msg = MIMEMultipart('alternative')
|
||||
msg.attach(MIMEText("<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Snatched</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>", 'html'))
|
||||
except:
|
||||
msg = MIMEText(ep_name)
|
||||
|
||||
try:
|
||||
msg = MIMEMultipart('alternative')
|
||||
msg.attach(MIMEText(
|
||||
"<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Snatched</h3>\n<p>Show: <b>" + re.search(
|
||||
"(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(
|
||||
".+ - (.+?-.+) -.+", ep_name).group(
|
||||
1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>",
|
||||
'html'))
|
||||
except:
|
||||
msg = MIMEText(ep_name)
|
||||
|
||||
msg['Subject'] = 'Snatched: ' + ep_name
|
||||
msg['From'] = sickbeard.EMAIL_FROM
|
||||
msg['To'] = ','.join(to)
|
||||
if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
|
||||
if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS,
|
||||
sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
|
||||
logger.log("Snatch notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG)
|
||||
else:
|
||||
logger.log("Snatch notification ERROR: %s" % self.last_err, logger.ERROR)
|
||||
logger.log("Snatch notification ERROR: %s" % self.last_err, logger.ERROR)
|
||||
|
||||
def notify_download(self, ep_name, title="Completed:"):
|
||||
"""
|
||||
|
@ -82,16 +89,22 @@ class EmailNotifier:
|
|||
if len(to) == 0:
|
||||
logger.log('Skipping email notify because there are no configured recepients', logger.WARNING)
|
||||
else:
|
||||
try:
|
||||
msg = MIMEMultipart('alternative')
|
||||
msg.attach(MIMEText("<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Downloaded</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>", 'html'))
|
||||
except:
|
||||
msg = MIMEText(ep_name)
|
||||
|
||||
try:
|
||||
msg = MIMEMultipart('alternative')
|
||||
msg.attach(MIMEText(
|
||||
"<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Downloaded</h3>\n<p>Show: <b>" + re.search(
|
||||
"(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(
|
||||
".+ - (.+?-.+) -.+", ep_name).group(
|
||||
1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>",
|
||||
'html'))
|
||||
except:
|
||||
msg = MIMEText(ep_name)
|
||||
|
||||
msg['Subject'] = 'Downloaded: ' + ep_name
|
||||
msg['From'] = sickbeard.EMAIL_FROM
|
||||
msg['To'] = ','.join(to)
|
||||
if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
|
||||
if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS,
|
||||
sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
|
||||
logger.log("Download notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG)
|
||||
else:
|
||||
logger.log("Download notification ERROR: %s" % self.last_err, logger.ERROR)
|
||||
|
@ -109,16 +122,22 @@ class EmailNotifier:
|
|||
if len(to) == 0:
|
||||
logger.log('Skipping email notify because there are no configured recepients', logger.WARNING)
|
||||
else:
|
||||
try:
|
||||
msg = MIMEMultipart('alternative')
|
||||
msg.attach(MIMEText("<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Subtitle Downloaded</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n<p>Language: <b>" + lang + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>", 'html'))
|
||||
except:
|
||||
msg = MIMEText(ep_name + ": " + lang)
|
||||
|
||||
try:
|
||||
msg = MIMEMultipart('alternative')
|
||||
msg.attach(MIMEText(
|
||||
"<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Subtitle Downloaded</h3>\n<p>Show: <b>" + re.search(
|
||||
"(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(
|
||||
".+ - (.+?-.+) -.+", ep_name).group(
|
||||
1) + "</b></p>\n<p>Language: <b>" + lang + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>",
|
||||
'html'))
|
||||
except:
|
||||
msg = MIMEText(ep_name + ": " + lang)
|
||||
|
||||
msg['Subject'] = lang + ' Subtitle Downloaded: ' + ep_name
|
||||
msg['From'] = sickbeard.EMAIL_FROM
|
||||
msg['To'] = ','.join(to)
|
||||
if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
|
||||
if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS,
|
||||
sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
|
||||
logger.log("Download notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG)
|
||||
else:
|
||||
logger.log("Download notification ERROR: %s" % self.last_err, logger.ERROR)
|
||||
|
@ -128,7 +147,7 @@ class EmailNotifier:
|
|||
|
||||
# Grab the global recipients
|
||||
for addr in sickbeard.EMAIL_LIST.split(','):
|
||||
if(len(addr.strip()) > 0):
|
||||
if (len(addr.strip()) > 0):
|
||||
addrs.append(addr)
|
||||
|
||||
# Grab the recipients for the show
|
||||
|
@ -137,15 +156,16 @@ class EmailNotifier:
|
|||
for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (s,)):
|
||||
if subs['notify_list']:
|
||||
for addr in subs['notify_list'].split(','):
|
||||
if(len(addr.strip()) > 0):
|
||||
if (len(addr.strip()) > 0):
|
||||
addrs.append(addr)
|
||||
|
||||
|
||||
addrs = set(addrs)
|
||||
logger.log('Notification recepients: %s' % addrs, logger.DEBUG)
|
||||
return addrs
|
||||
|
||||
|
||||
def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtpDebug=False):
|
||||
logger.log('HOST: %s; PORT: %s; FROM: %s, TLS: %s, USER: %s, PWD: %s, TO: %s' % (host, port, smtp_from, use_tls, user, pwd, to), logger.DEBUG)
|
||||
logger.log('HOST: %s; PORT: %s; FROM: %s, TLS: %s, USER: %s, PWD: %s, TO: %s' % (
|
||||
host, port, smtp_from, use_tls, user, pwd, to), logger.DEBUG)
|
||||
srv = smtplib.SMTP(host, int(port))
|
||||
if smtpDebug:
|
||||
srv.set_debuglevel(1)
|
||||
|
@ -172,5 +192,6 @@ class EmailNotifier:
|
|||
titles.sort(key=len, reverse=True)
|
||||
logger.log("TITLES: %s" % titles, logger.DEBUG)
|
||||
return titles
|
||||
|
||||
|
||||
|
||||
notifier = EmailNotifier
|
||||
|
|
|
@ -25,11 +25,12 @@ from sickbeard.exceptions import ex
|
|||
|
||||
from lib.growl import gntp
|
||||
|
||||
class GrowlNotifier:
|
||||
|
||||
class GrowlNotifier:
|
||||
def test_notify(self, host, password):
|
||||
self._sendRegistration(host, password, 'Test')
|
||||
return self._sendGrowl("Test Growl", "Testing Growl settings from Sick Beard", "Test", host, password, force=True)
|
||||
return self._sendGrowl("Test Growl", "Testing Growl settings from Sick Beard", "Test", host, password,
|
||||
force=True)
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.GROWL_NOTIFY_ONSNATCH:
|
||||
|
@ -43,145 +44,145 @@ class GrowlNotifier:
|
|||
if sickbeard.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ": " + lang)
|
||||
|
||||
def _send_growl(self, options,message=None):
|
||||
|
||||
def _send_growl(self, options, message=None):
|
||||
|
||||
#Send Notification
|
||||
notice = gntp.GNTPNotice()
|
||||
|
||||
|
||||
#Required
|
||||
notice.add_header('Application-Name',options['app'])
|
||||
notice.add_header('Notification-Name',options['name'])
|
||||
notice.add_header('Notification-Title',options['title'])
|
||||
|
||||
notice.add_header('Application-Name', options['app'])
|
||||
notice.add_header('Notification-Name', options['name'])
|
||||
notice.add_header('Notification-Title', options['title'])
|
||||
|
||||
if options['password']:
|
||||
notice.set_password(options['password'])
|
||||
|
||||
|
||||
#Optional
|
||||
if options['sticky']:
|
||||
notice.add_header('Notification-Sticky',options['sticky'])
|
||||
notice.add_header('Notification-Sticky', options['sticky'])
|
||||
if options['priority']:
|
||||
notice.add_header('Notification-Priority',options['priority'])
|
||||
notice.add_header('Notification-Priority', options['priority'])
|
||||
if options['icon']:
|
||||
notice.add_header('Notification-Icon', 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')
|
||||
|
||||
if message:
|
||||
notice.add_header('Notification-Text',message)
|
||||
notice.add_header('Notification-Icon',
|
||||
'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')
|
||||
|
||||
response = self._send(options['host'],options['port'],notice.encode(),options['debug'])
|
||||
if isinstance(response,gntp.GNTPOK): return True
|
||||
if message:
|
||||
notice.add_header('Notification-Text', message)
|
||||
|
||||
response = self._send(options['host'], options['port'], notice.encode(), options['debug'])
|
||||
if isinstance(response, gntp.GNTPOK): return True
|
||||
return False
|
||||
|
||||
def _send(self, host,port,data,debug=False):
|
||||
if debug: print '<Sending>\n',data,'\n</Sending>'
|
||||
|
||||
def _send(self, host, port, data, debug=False):
|
||||
if debug: print '<Sending>\n', data, '\n</Sending>'
|
||||
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
s.connect((host,port))
|
||||
s.connect((host, port))
|
||||
s.send(data)
|
||||
response = gntp.parse_gntp(s.recv(1024))
|
||||
s.close()
|
||||
|
||||
if debug: print '<Recieved>\n',response,'\n</Recieved>'
|
||||
|
||||
if debug: print '<Recieved>\n', response, '\n</Recieved>'
|
||||
|
||||
return response
|
||||
|
||||
def _sendGrowl(self, title="Sick Beard Notification", message=None, name=None, host=None, password=None, force=False):
|
||||
def _sendGrowl(self, title="Sick Beard Notification", message=None, name=None, host=None, password=None,
|
||||
force=False):
|
||||
if not sickbeard.USE_GROWL and not force:
|
||||
return False
|
||||
|
||||
|
||||
if name == None:
|
||||
name = title
|
||||
|
||||
|
||||
if host == None:
|
||||
hostParts = sickbeard.GROWL_HOST.split(':')
|
||||
else:
|
||||
hostParts = host.split(':')
|
||||
|
||||
|
||||
if len(hostParts) != 2 or hostParts[1] == '':
|
||||
port = 23053
|
||||
else:
|
||||
port = int(hostParts[1])
|
||||
|
||||
growlHosts = [(hostParts[0],port)]
|
||||
|
||||
|
||||
growlHosts = [(hostParts[0], port)]
|
||||
|
||||
opts = {}
|
||||
|
||||
|
||||
opts['name'] = name
|
||||
|
||||
|
||||
opts['title'] = title
|
||||
opts['app'] = 'SickBeard'
|
||||
|
||||
|
||||
opts['sticky'] = None
|
||||
opts['priority'] = None
|
||||
opts['debug'] = False
|
||||
|
||||
|
||||
if password == None:
|
||||
opts['password'] = sickbeard.GROWL_PASSWORD
|
||||
else:
|
||||
opts['password'] = password
|
||||
|
||||
|
||||
opts['icon'] = True
|
||||
|
||||
|
||||
|
||||
for pc in growlHosts:
|
||||
opts['host'] = pc[0]
|
||||
opts['port'] = pc[1]
|
||||
logger.log(u"Sending growl to "+opts['host']+":"+str(opts['port'])+": "+message)
|
||||
logger.log(u"Sending growl to " + opts['host'] + ":" + str(opts['port']) + ": " + message)
|
||||
try:
|
||||
if self._send_growl(opts, message):
|
||||
return True
|
||||
else:
|
||||
else:
|
||||
if self._sendRegistration(host, password, 'Sickbeard'):
|
||||
return self._send_growl(opts, message)
|
||||
else:
|
||||
return False
|
||||
except socket.error, e:
|
||||
logger.log(u"Unable to send growl to "+opts['host']+":"+str(opts['port'])+": "+ex(e))
|
||||
logger.log(u"Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + ": " + ex(e))
|
||||
return False
|
||||
|
||||
def _sendRegistration(self, host=None, password=None, name='Sick Beard Notification'):
|
||||
opts = {}
|
||||
|
||||
|
||||
if host == None:
|
||||
hostParts = sickbeard.GROWL_HOST.split(':')
|
||||
else:
|
||||
hostParts = host.split(':')
|
||||
|
||||
|
||||
if len(hostParts) != 2 or hostParts[1] == '':
|
||||
port = 23053
|
||||
else:
|
||||
port = int(hostParts[1])
|
||||
|
||||
|
||||
opts['host'] = hostParts[0]
|
||||
opts['port'] = port
|
||||
|
||||
|
||||
|
||||
if password == None:
|
||||
opts['password'] = sickbeard.GROWL_PASSWORD
|
||||
else:
|
||||
opts['password'] = password
|
||||
|
||||
|
||||
|
||||
opts['app'] = 'SickBeard'
|
||||
opts['debug'] = False
|
||||
|
||||
|
||||
#Send Registration
|
||||
register = gntp.GNTPRegister()
|
||||
register.add_header('Application-Name', opts['app'])
|
||||
register.add_header('Application-Icon', 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')
|
||||
|
||||
register.add_header('Application-Icon',
|
||||
'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')
|
||||
|
||||
register.add_notification('Test', True)
|
||||
register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True)
|
||||
register.add_notification(common.notifyStrings[common.NOTIFY_DOWNLOAD], True)
|
||||
|
||||
if opts['password']:
|
||||
register.set_password(opts['password'])
|
||||
|
||||
|
||||
try:
|
||||
return self._send(opts['host'],opts['port'],register.encode(),opts['debug'])
|
||||
return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])
|
||||
except socket.error, e:
|
||||
logger.log(u"Unable to send growl to "+opts['host']+":"+str(opts['port'])+": "+str(e).decode('utf-8'))
|
||||
logger.log(
|
||||
u"Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + ": " + str(e).decode('utf-8'))
|
||||
return False
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
notifier = GrowlNotifier
|
|
@ -22,6 +22,7 @@ import sickbeard
|
|||
|
||||
from sickbeard import logger, common
|
||||
|
||||
|
||||
def diagnose():
|
||||
'''
|
||||
Check the environment for reasons libnotify isn't working. Return a
|
||||
|
@ -115,4 +116,5 @@ class LibnotifyNotifier:
|
|||
except self.gobject.GError:
|
||||
return False
|
||||
|
||||
|
||||
notifier = LibnotifyNotifier
|
||||
|
|
|
@ -3,54 +3,59 @@ import sickbeard
|
|||
from sickbeard import logger, common
|
||||
from lib.pynma import pynma
|
||||
|
||||
|
||||
class NMA_Notifier:
|
||||
|
||||
def test_notify(self, nma_api, nma_priority):
|
||||
return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard", force=True)
|
||||
return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard",
|
||||
force=True)
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.NMA_NOTIFY_ONSNATCH:
|
||||
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
|
||||
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
|
||||
message=ep_name)
|
||||
|
||||
def notify_download(self, ep_name):
|
||||
if sickbeard.NMA_NOTIFY_ONDOWNLOAD:
|
||||
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
|
||||
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
|
||||
message=ep_name)
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
|
||||
|
||||
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
|
||||
message=ep_name + ": " + lang)
|
||||
|
||||
def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False):
|
||||
|
||||
|
||||
title = 'Sick-Beard'
|
||||
|
||||
|
||||
if not sickbeard.USE_NMA and not force:
|
||||
return False
|
||||
|
||||
|
||||
if nma_api == None:
|
||||
nma_api = sickbeard.NMA_API
|
||||
|
||||
|
||||
if nma_priority == None:
|
||||
nma_priority = sickbeard.NMA_PRIORITY
|
||||
|
||||
|
||||
logger.log(u"NMA title: " + title, logger.DEBUG)
|
||||
logger.log(u"NMA event: " + event, logger.DEBUG)
|
||||
logger.log(u"NMA message: " + message, logger.DEBUG)
|
||||
|
||||
|
||||
batch = False
|
||||
|
||||
|
||||
p = pynma.PyNMA()
|
||||
keys = nma_api.split(',')
|
||||
p.addkey(keys)
|
||||
|
||||
|
||||
if len(keys) > 1: batch = True
|
||||
|
||||
|
||||
response = p.push(title, event, message, priority=nma_priority, batch_mode=batch)
|
||||
|
||||
|
||||
if not response[nma_api][u'code'] == u'200':
|
||||
logger.log(u'Could not send notification to NotifyMyAndroid', logger.ERROR)
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
|
||||
notifier = NMA_Notifier
|
|
@ -38,7 +38,7 @@ class NMJNotifier:
|
|||
|
||||
Returns: True if the settings were retrieved successfully, False otherwise
|
||||
"""
|
||||
|
||||
|
||||
# establish a terminal session to the PC
|
||||
terminal = False
|
||||
try:
|
||||
|
@ -68,7 +68,7 @@ class NMJNotifier:
|
|||
else:
|
||||
logger.log(u"Could not get current NMJ database on %s, NMJ is probably not running!" % (host), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
# if the device is a remote host then try to parse the mounting URL and save it to the config
|
||||
if device.startswith("NETWORK_SHARE/"):
|
||||
match = re.search(".*(?=\r\n?%s)" % (re.escape(device[14:])), tnoutput)
|
||||
|
@ -78,11 +78,12 @@ class NMJNotifier:
|
|||
logger.log(u"Found mounting url on the Popcorn Hour in configuration: %s" % (mount), logger.DEBUG)
|
||||
sickbeard.NMJ_MOUNT = mount
|
||||
else:
|
||||
logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url", logger.DEBUG)
|
||||
logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url",
|
||||
logger.DEBUG)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
return False
|
||||
#Not implemented: Start the scanner when snatched does not make any sense
|
||||
|
@ -94,7 +95,7 @@ class NMJNotifier:
|
|||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.USE_NMJ:
|
||||
self._notifyNMJ()
|
||||
|
||||
|
||||
def test_notify(self, host, database, mount):
|
||||
return self._sendNMJ(host, database, mount)
|
||||
|
||||
|
@ -108,7 +109,7 @@ class NMJNotifier:
|
|||
|
||||
Returns: True if the request succeeded, False otherwise
|
||||
"""
|
||||
|
||||
|
||||
# if a mount URL is provided then attempt to open a handle to that URL
|
||||
if mount:
|
||||
try:
|
||||
|
@ -146,7 +147,7 @@ class NMJNotifier:
|
|||
except SyntaxError, e:
|
||||
logger.log(u"Unable to parse XML returned from the Popcorn Hour: %s" % (e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
# if the result was a number then consider that an error
|
||||
if int(result) > 0:
|
||||
logger.log(u"Popcorn Hour returned an errorcode: %s" % (result))
|
||||
|
@ -180,4 +181,5 @@ class NMJNotifier:
|
|||
|
||||
return self._sendNMJ(host, database, mount)
|
||||
|
||||
|
||||
notifier = NMJNotifier
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import urllib, urllib2,xml.dom.minidom
|
||||
import urllib, urllib2, xml.dom.minidom
|
||||
from xml.dom.minidom import parseString
|
||||
import sickbeard
|
||||
import telnetlib
|
||||
|
@ -33,7 +33,6 @@ except ImportError:
|
|||
|
||||
|
||||
class NMJv2Notifier:
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
return False
|
||||
#Not implemented: Start the scanner when snatched does not make any sense
|
||||
|
@ -58,32 +57,35 @@ class NMJv2Notifier:
|
|||
Returns: True if the settings were retrieved successfully, False otherwise
|
||||
"""
|
||||
try:
|
||||
url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2="+instance+"&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false"
|
||||
url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2=" + instance + "&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false"
|
||||
req = urllib2.Request(url_loc)
|
||||
handle1 = urllib2.urlopen(req)
|
||||
response1 = handle1.read()
|
||||
xml = parseString(response1)
|
||||
time.sleep (300.0 / 1000.0)
|
||||
time.sleep(300.0 / 1000.0)
|
||||
for node in xml.getElementsByTagName('path'):
|
||||
xmlTag=node.toxml();
|
||||
xmlData=xmlTag.replace('<path>','').replace('</path>','').replace('[=]','')
|
||||
url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1="+ xmlData
|
||||
xmlTag = node.toxml();
|
||||
xmlData = xmlTag.replace('<path>', '').replace('</path>', '').replace('[=]', '')
|
||||
url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1=" + xmlData
|
||||
reqdb = urllib2.Request(url_db)
|
||||
handledb = urllib2.urlopen(reqdb)
|
||||
responsedb = handledb.read()
|
||||
xmldb = parseString(responsedb)
|
||||
returnvalue=xmldb.getElementsByTagName('returnValue')[0].toxml().replace('<returnValue>','').replace('</returnValue>','')
|
||||
if returnvalue=="0":
|
||||
DB_path=xmldb.getElementsByTagName('database_path')[0].toxml().replace('<database_path>','').replace('</database_path>','').replace('[=]','')
|
||||
if dbloc=="local" and DB_path.find("localhost")>-1:
|
||||
sickbeard.NMJv2_HOST=host
|
||||
sickbeard.NMJv2_DATABASE=DB_path
|
||||
returnvalue = xmldb.getElementsByTagName('returnValue')[0].toxml().replace('<returnValue>', '').replace(
|
||||
'</returnValue>', '')
|
||||
if returnvalue == "0":
|
||||
DB_path = xmldb.getElementsByTagName('database_path')[0].toxml().replace('<database_path>',
|
||||
'').replace(
|
||||
'</database_path>', '').replace('[=]', '')
|
||||
if dbloc == "local" and DB_path.find("localhost") > -1:
|
||||
sickbeard.NMJv2_HOST = host
|
||||
sickbeard.NMJv2_DATABASE = DB_path
|
||||
return True
|
||||
if dbloc=="network" and DB_path.find("://")>-1:
|
||||
sickbeard.NMJv2_HOST=host
|
||||
sickbeard.NMJv2_DATABASE=DB_path
|
||||
if dbloc == "network" and DB_path.find("://") > -1:
|
||||
sickbeard.NMJv2_HOST = host
|
||||
sickbeard.NMJv2_DATABASE = DB_path
|
||||
return True
|
||||
|
||||
|
||||
except IOError, e:
|
||||
logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e))
|
||||
return False
|
||||
|
@ -99,52 +101,52 @@ class NMJv2Notifier:
|
|||
|
||||
Returns: True if the request succeeded, False otherwise
|
||||
"""
|
||||
|
||||
|
||||
#if a host is provided then attempt to open a handle to that URL
|
||||
try:
|
||||
url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=&arg3=update_all"
|
||||
url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=&arg3=update_all"
|
||||
logger.log(u"NMJ scan update command send to host: %s" % (host))
|
||||
url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=background&arg3="
|
||||
url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=background&arg3="
|
||||
logger.log(u"Try to mount network drive via url: %s" % (host), logger.DEBUG)
|
||||
prereq = urllib2.Request(url_scandir)
|
||||
req = urllib2.Request(url_updatedb)
|
||||
handle1 = urllib2.urlopen(prereq)
|
||||
response1 = handle1.read()
|
||||
time.sleep (300.0 / 1000.0)
|
||||
time.sleep(300.0 / 1000.0)
|
||||
handle2 = urllib2.urlopen(req)
|
||||
response2 = handle2.read()
|
||||
except IOError, e:
|
||||
logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e))
|
||||
return False
|
||||
try:
|
||||
try:
|
||||
et = etree.fromstring(response1)
|
||||
result1 = et.findtext("returnValue")
|
||||
except SyntaxError, e:
|
||||
logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR)
|
||||
return False
|
||||
try:
|
||||
logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR)
|
||||
return False
|
||||
try:
|
||||
et = etree.fromstring(response2)
|
||||
result2 = et.findtext("returnValue")
|
||||
except SyntaxError, e:
|
||||
logger.log(u"Unable to parse XML returned from the Popcorn Hour: scanner_start, %s" % (e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
# if the result was a number then consider that an error
|
||||
error_codes=["8","11","22","49","50","51","60"]
|
||||
error_messages=["Invalid parameter(s)/argument(s)",
|
||||
"Invalid database path",
|
||||
"Insufficient size",
|
||||
"Database write error",
|
||||
"Database read error",
|
||||
"Open fifo pipe failed",
|
||||
"Read only file system"]
|
||||
error_codes = ["8", "11", "22", "49", "50", "51", "60"]
|
||||
error_messages = ["Invalid parameter(s)/argument(s)",
|
||||
"Invalid database path",
|
||||
"Insufficient size",
|
||||
"Database write error",
|
||||
"Database read error",
|
||||
"Open fifo pipe failed",
|
||||
"Read only file system"]
|
||||
if int(result1) > 0:
|
||||
index=error_codes.index(result1)
|
||||
index = error_codes.index(result1)
|
||||
logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
|
||||
return False
|
||||
else:
|
||||
if int(result2) > 0:
|
||||
index=error_codes.index(result2)
|
||||
index = error_codes.index(result2)
|
||||
logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
|
||||
return False
|
||||
else:
|
||||
|
@ -172,4 +174,5 @@ class NMJv2Notifier:
|
|||
|
||||
return self._sendNMJ(host)
|
||||
|
||||
|
||||
notifier = NMJv2Notifier
|
||||
|
|
|
@ -32,7 +32,6 @@ from xml.dom import minidom
|
|||
|
||||
|
||||
class PLEXNotifier:
|
||||
|
||||
def _send_to_plex(self, command, host, username=None, password=None):
|
||||
"""Handles communication to Plex hosts via HTTP API
|
||||
|
||||
|
@ -127,16 +126,17 @@ class PLEXNotifier:
|
|||
for curHost in [x.strip() for x in host.split(",")]:
|
||||
logger.log(u"Sending Plex notification to '" + curHost + "' - " + message, logger.MESSAGE)
|
||||
|
||||
command = {'command': 'ExecBuiltIn', 'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
|
||||
command = {'command': 'ExecBuiltIn',
|
||||
'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
|
||||
notifyResult = self._send_to_plex(command, curHost, username, password)
|
||||
if notifyResult:
|
||||
result += curHost + ':' + str(notifyResult)
|
||||
|
||||
return result
|
||||
|
||||
##############################################################################
|
||||
# Public functions
|
||||
##############################################################################
|
||||
##############################################################################
|
||||
# Public functions
|
||||
##############################################################################
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.PLEX_NOTIFY_ONSNATCH:
|
||||
|
@ -149,9 +149,10 @@ class PLEXNotifier:
|
|||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._notify_pmc(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])
|
||||
|
||||
|
||||
def test_notify(self, host, username, password):
|
||||
return self._notify_pmc("Testing Plex notifications from Sick Beard", "Test Notification", host, username, password, force=True)
|
||||
return self._notify_pmc("Testing Plex notifications from Sick Beard", "Test Notification", host, username,
|
||||
password, force=True)
|
||||
|
||||
def update_library(self):
|
||||
"""Handles updating the Plex Media Server host via HTTP API
|
||||
|
@ -168,7 +169,8 @@ class PLEXNotifier:
|
|||
logger.log(u"No Plex Server host specified, check your settings", logger.DEBUG)
|
||||
return False
|
||||
|
||||
logger.log(u"Updating library for the Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST, logger.MESSAGE)
|
||||
logger.log(u"Updating library for the Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST,
|
||||
logger.MESSAGE)
|
||||
|
||||
url = "http://%s/library/sections" % sickbeard.PLEX_SERVER_HOST
|
||||
try:
|
||||
|
@ -193,4 +195,5 @@ class PLEXNotifier:
|
|||
|
||||
return True
|
||||
|
||||
|
||||
notifier = PLEXNotifier
|
||||
|
|
|
@ -31,56 +31,59 @@ import sickbeard
|
|||
|
||||
from sickbeard import logger, common
|
||||
|
||||
class ProwlNotifier:
|
||||
|
||||
class ProwlNotifier:
|
||||
def test_notify(self, prowl_api, prowl_priority):
|
||||
return self._sendProwl(prowl_api, prowl_priority, event="Test", message="Testing Prowl settings from Sick Beard", force=True)
|
||||
return self._sendProwl(prowl_api, prowl_priority, event="Test",
|
||||
message="Testing Prowl settings from Sick Beard", force=True)
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.PROWL_NOTIFY_ONSNATCH:
|
||||
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
|
||||
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
|
||||
message=ep_name)
|
||||
|
||||
def notify_download(self, ep_name):
|
||||
if sickbeard.PROWL_NOTIFY_ONDOWNLOAD:
|
||||
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
|
||||
|
||||
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
|
||||
message=ep_name)
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
|
||||
|
||||
self._sendProwl(prowl_api=None, prowl_priority=None,
|
||||
event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
|
||||
|
||||
def _sendProwl(self, prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
|
||||
|
||||
|
||||
if not sickbeard.USE_PROWL and not force:
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
if prowl_api == None:
|
||||
prowl_api = sickbeard.PROWL_API
|
||||
|
||||
|
||||
if prowl_priority == None:
|
||||
prowl_priority = sickbeard.PROWL_PRIORITY
|
||||
|
||||
|
||||
|
||||
title = "Sick Beard"
|
||||
|
||||
|
||||
logger.log(u"Prowl title: " + title, logger.DEBUG)
|
||||
logger.log(u"Prowl event: " + event, logger.DEBUG)
|
||||
logger.log(u"Prowl message: " + message, logger.DEBUG)
|
||||
logger.log(u"Prowl api: " + prowl_api, logger.DEBUG)
|
||||
logger.log(u"Prowl priority: " + prowl_priority, logger.DEBUG)
|
||||
|
||||
|
||||
http_handler = HTTPSConnection("api.prowlapp.com")
|
||||
|
||||
|
||||
data = {'apikey': prowl_api,
|
||||
'application': title,
|
||||
'event': event,
|
||||
'description': message.encode('utf-8'),
|
||||
'priority': prowl_priority }
|
||||
'priority': prowl_priority}
|
||||
|
||||
try:
|
||||
http_handler.request("POST",
|
||||
"/publicapi/add",
|
||||
headers = {'Content-type': "application/x-www-form-urlencoded"},
|
||||
body = urlencode(data))
|
||||
"/publicapi/add",
|
||||
headers={'Content-type': "application/x-www-form-urlencoded"},
|
||||
body=urlencode(data))
|
||||
except (SSLError, HTTPException):
|
||||
logger.log(u"Prowl notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
@ -88,13 +91,14 @@ class ProwlNotifier:
|
|||
request_status = response.status
|
||||
|
||||
if request_status == 200:
|
||||
logger.log(u"Prowl notifications sent.", logger.DEBUG)
|
||||
return True
|
||||
elif request_status == 401:
|
||||
logger.log(u"Prowl auth failed: %s" % response.reason, logger.ERROR)
|
||||
return False
|
||||
logger.log(u"Prowl notifications sent.", logger.DEBUG)
|
||||
return True
|
||||
elif request_status == 401:
|
||||
logger.log(u"Prowl auth failed: %s" % response.reason, logger.ERROR)
|
||||
return False
|
||||
else:
|
||||
logger.log(u"Prowl notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log(u"Prowl notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
notifier = ProwlNotifier
|
||||
|
|
|
@ -24,46 +24,51 @@ from ssl import SSLError
|
|||
import sickbeard
|
||||
from sickbeard import logger, common
|
||||
|
||||
class PushalotNotifier:
|
||||
|
||||
class PushalotNotifier:
|
||||
def test_notify(self, pushalot_authorizationtoken):
|
||||
return self._sendPushalot(pushalot_authorizationtoken, event="Test", message="Testing Pushalot settings from Sick Beard", force=True)
|
||||
return self._sendPushalot(pushalot_authorizationtoken, event="Test",
|
||||
message="Testing Pushalot settings from Sick Beard", force=True)
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.PUSHALOT_NOTIFY_ONSNATCH:
|
||||
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
|
||||
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
|
||||
message=ep_name)
|
||||
|
||||
def notify_download(self, ep_name):
|
||||
if sickbeard.PUSHALOT_NOTIFY_ONDOWNLOAD:
|
||||
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
|
||||
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
|
||||
message=ep_name)
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
|
||||
self._sendPushalot(pushalot_authorizationtoken=None,
|
||||
event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
|
||||
message=ep_name + ": " + lang)
|
||||
|
||||
def _sendPushalot(self, pushalot_authorizationtoken=None, event=None, message=None, force=False):
|
||||
|
||||
|
||||
if not sickbeard.USE_PUSHALOT and not force:
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
if pushalot_authorizationtoken == None:
|
||||
pushalot_authorizationtoken = sickbeard.PUSHALOT_AUTHORIZATIONTOKEN
|
||||
|
||||
|
||||
logger.log(u"Pushalot event: " + event, logger.DEBUG)
|
||||
logger.log(u"Pushalot message: " + message, logger.DEBUG)
|
||||
logger.log(u"Pushalot api: " + pushalot_authorizationtoken, logger.DEBUG)
|
||||
|
||||
|
||||
http_handler = HTTPSConnection("pushalot.com")
|
||||
|
||||
|
||||
data = {'AuthorizationToken': pushalot_authorizationtoken,
|
||||
'Title': event.encode('utf-8'),
|
||||
'Body': message.encode('utf-8') }
|
||||
'Body': message.encode('utf-8')}
|
||||
|
||||
try:
|
||||
http_handler.request("POST",
|
||||
"/api/sendmessage",
|
||||
headers = {'Content-type': "application/x-www-form-urlencoded"},
|
||||
body = urlencode(data))
|
||||
"/api/sendmessage",
|
||||
headers={'Content-type': "application/x-www-form-urlencoded"},
|
||||
body=urlencode(data))
|
||||
except (SSLError, HTTPException):
|
||||
logger.log(u"Pushalot notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
@ -71,13 +76,14 @@ class PushalotNotifier:
|
|||
request_status = response.status
|
||||
|
||||
if request_status == 200:
|
||||
logger.log(u"Pushalot notifications sent.", logger.DEBUG)
|
||||
return True
|
||||
elif request_status == 410:
|
||||
logger.log(u"Pushalot auth failed: %s" % response.reason, logger.ERROR)
|
||||
return False
|
||||
logger.log(u"Pushalot notifications sent.", logger.DEBUG)
|
||||
return True
|
||||
elif request_status == 410:
|
||||
logger.log(u"Pushalot auth failed: %s" % response.reason, logger.ERROR)
|
||||
return False
|
||||
else:
|
||||
logger.log(u"Pushalot notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log(u"Pushalot notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
notifier = PushalotNotifier
|
||||
|
|
|
@ -25,31 +25,36 @@ from ssl import SSLError
|
|||
import sickbeard
|
||||
from sickbeard import logger, common
|
||||
|
||||
class PushbulletNotifier:
|
||||
|
||||
class PushbulletNotifier:
|
||||
def test_notify(self, pushbullet_api):
|
||||
return self._sendPushbullet(pushbullet_api, event="Test", message="Testing Pushbullet settings from Sick Beard", method="POST", notificationType="note", force=True)
|
||||
return self._sendPushbullet(pushbullet_api, event="Test", message="Testing Pushbullet settings from Sick Beard",
|
||||
method="POST", notificationType="note", force=True)
|
||||
|
||||
def get_devices(self, pushbullet_api):
|
||||
return self._sendPushbullet(pushbullet_api, method="GET", force=True)
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.PUSHBULLET_NOTIFY_ONSNATCH:
|
||||
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name, notificationType="note", method="POST")
|
||||
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name,
|
||||
notificationType="note", method="POST")
|
||||
|
||||
def notify_download(self, ep_name):
|
||||
if sickbeard.PUSHBULLET_NOTIFY_ONDOWNLOAD:
|
||||
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name, notificationType="note", method="POST")
|
||||
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
|
||||
message=ep_name, notificationType="note", method="POST")
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang, notificationType="note", method="POST")
|
||||
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
|
||||
message=ep_name + ": " + lang, notificationType="note", method="POST")
|
||||
|
||||
def _sendPushbullet(self, pushbullet_api=None, pushbullet_device=None, event=None, message=None,
|
||||
notificationType=None, method=None, force=False):
|
||||
|
||||
def _sendPushbullet(self, pushbullet_api=None, pushbullet_device=None, event=None, message=None, notificationType=None, method=None, force=False):
|
||||
|
||||
if not sickbeard.USE_PUSHBULLET and not force:
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
if pushbullet_api == None:
|
||||
pushbullet_api = sickbeard.PUSHBULLET_API
|
||||
if pushbullet_device == None:
|
||||
|
@ -59,13 +64,13 @@ class PushbulletNotifier:
|
|||
uri = '/api/pushes'
|
||||
else:
|
||||
uri = '/api/devices'
|
||||
|
||||
|
||||
logger.log(u"Pushbullet event: " + str(event), logger.DEBUG)
|
||||
logger.log(u"Pushbullet message: " + str(message), logger.DEBUG)
|
||||
logger.log(u"Pushbullet api: " + str(pushbullet_api), logger.DEBUG)
|
||||
logger.log(u"Pushbullet devices: " + str(pushbullet_device), logger.DEBUG)
|
||||
logger.log(u"Pushbullet notification type: " + str(notificationType), logger.DEBUG)
|
||||
|
||||
|
||||
http_handler = HTTPSConnection("api.pushbullet.com")
|
||||
|
||||
authString = base64.encodestring('%s:' % (pushbullet_api)).replace('\n', '')
|
||||
|
@ -74,7 +79,7 @@ class PushbulletNotifier:
|
|||
testMessage = True
|
||||
try:
|
||||
logger.log(u"Testing Pushbullet authentication and retrieving the device list.", logger.DEBUG)
|
||||
http_handler.request(method, uri, None, headers={'Authorization':'Basic %s:' % authString})
|
||||
http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString})
|
||||
except (SSLError, HTTPException):
|
||||
logger.log(u"Pushbullet notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
@ -86,7 +91,8 @@ class PushbulletNotifier:
|
|||
'body': message.encode('utf-8'),
|
||||
'device_iden': pushbullet_device,
|
||||
'type': notificationType}
|
||||
http_handler.request(method, uri, body = urlencode(data), headers={'Authorization':'Basic %s' % authString})
|
||||
http_handler.request(method, uri, body=urlencode(data),
|
||||
headers={'Authorization': 'Basic %s' % authString})
|
||||
pass
|
||||
except (SSLError, HTTPException):
|
||||
return False
|
||||
|
@ -96,17 +102,18 @@ class PushbulletNotifier:
|
|||
request_status = response.status
|
||||
|
||||
if request_status == 200:
|
||||
if testMessage:
|
||||
return request_body
|
||||
else:
|
||||
logger.log(u"Pushbullet notifications sent.", logger.DEBUG)
|
||||
return True
|
||||
if testMessage:
|
||||
return request_body
|
||||
else:
|
||||
logger.log(u"Pushbullet notifications sent.", logger.DEBUG)
|
||||
return True
|
||||
elif request_status == 410:
|
||||
logger.log(u"Pushbullet auth failed: %s" % response.reason, logger.ERROR)
|
||||
return False
|
||||
logger.log(u"Pushbullet auth failed: %s" % response.reason, logger.ERROR)
|
||||
return False
|
||||
else:
|
||||
logger.log(u"Pushbullet notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log(u"Pushbullet notification failed.", logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
notifier = PushbulletNotifier
|
||||
|
||||
|
|
|
@ -30,12 +30,12 @@ from sickbeard.exceptions import ex
|
|||
API_URL = "https://api.pushover.net/1/messages.json"
|
||||
API_KEY = "OKCXmkvHN1syU2e8xvpefTnyvVWGv5"
|
||||
|
||||
|
||||
class PushoverNotifier:
|
||||
|
||||
def test_notify(self, userKey=None):
|
||||
return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey )
|
||||
return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey)
|
||||
|
||||
def _sendPushover(self, msg, title, userKey=None ):
|
||||
def _sendPushover(self, msg, title, userKey=None):
|
||||
"""
|
||||
Sends a pushover notification to the address provided
|
||||
|
||||
|
@ -48,7 +48,7 @@ class PushoverNotifier:
|
|||
|
||||
if not userKey:
|
||||
userKey = sickbeard.PUSHOVER_USERKEY
|
||||
|
||||
|
||||
# build up the URL and parameters
|
||||
msg = msg.strip()
|
||||
curUrl = API_URL
|
||||
|
@ -59,7 +59,7 @@ class PushoverNotifier:
|
|||
'user': userKey,
|
||||
'message': msg.encode('utf-8'),
|
||||
'timestamp': int(time.time())
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
# send the request to pushover
|
||||
|
@ -67,7 +67,7 @@ class PushoverNotifier:
|
|||
req = urllib2.Request(curUrl)
|
||||
handle = urllib2.urlopen(req, data)
|
||||
handle.close()
|
||||
|
||||
|
||||
except urllib2.URLError, e:
|
||||
# if we get an error back that doesn't have an error code then who knows what's really happening
|
||||
if not hasattr(e, 'code'):
|
||||
|
@ -80,19 +80,19 @@ class PushoverNotifier:
|
|||
if e.code == 404:
|
||||
logger.log("Username is wrong/not a pushover email. Pushover will send an email to it", logger.WARNING)
|
||||
return False
|
||||
|
||||
|
||||
# For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service.
|
||||
elif e.code == 401:
|
||||
|
||||
|
||||
#HTTP status 401 if the user doesn't have the service added
|
||||
subscribeNote = self._sendPushover(msg, title, userKey )
|
||||
subscribeNote = self._sendPushover(msg, title, userKey)
|
||||
if subscribeNote:
|
||||
logger.log("Subscription send", logger.DEBUG)
|
||||
return True
|
||||
else:
|
||||
logger.log("Subscription could not be send", logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
# If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
|
||||
elif e.code == 400:
|
||||
logger.log("Wrong data sent to pushover", logger.ERROR)
|
||||
|
@ -104,17 +104,17 @@ class PushoverNotifier:
|
|||
def notify_snatch(self, ep_name, title=notifyStrings[NOTIFY_SNATCH]):
|
||||
if sickbeard.PUSHOVER_NOTIFY_ONSNATCH:
|
||||
self._notifyPushover(title, ep_name)
|
||||
|
||||
|
||||
|
||||
def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]):
|
||||
if sickbeard.PUSHOVER_NOTIFY_ONDOWNLOAD:
|
||||
self._notifyPushover(title, ep_name)
|
||||
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]):
|
||||
if sickbeard.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._notifyPushover(title, ep_name + ": " + lang)
|
||||
|
||||
def _notifyPushover(self, title, message, userKey=None ):
|
||||
def _notifyPushover(self, title, message, userKey=None):
|
||||
"""
|
||||
Sends a pushover notification based on the provided info or SB config
|
||||
|
||||
|
@ -137,4 +137,5 @@ class PushoverNotifier:
|
|||
self._sendPushover(message, title)
|
||||
return True
|
||||
|
||||
|
||||
notifier = PushoverNotifier
|
||||
|
|
|
@ -25,28 +25,28 @@ from urllib2 import Request, urlopen, URLError
|
|||
from sickbeard import logger
|
||||
from sickbeard import encodingKludge as ek
|
||||
|
||||
class pyTivoNotifier:
|
||||
|
||||
class pyTivoNotifier:
|
||||
def notify_snatch(self, ep_name):
|
||||
pass
|
||||
|
||||
def notify_download(self, ep_name):
|
||||
pass
|
||||
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
pass
|
||||
|
||||
def update_library(self, ep_obj):
|
||||
|
||||
# Values from config
|
||||
|
||||
|
||||
if not sickbeard.USE_PYTIVO:
|
||||
return False
|
||||
|
||||
|
||||
host = sickbeard.PYTIVO_HOST
|
||||
shareName = sickbeard.PYTIVO_SHARE_NAME
|
||||
tsn = sickbeard.PYTIVO_TIVO_NAME
|
||||
|
||||
|
||||
# There are two more values required, the container and file.
|
||||
#
|
||||
# container: The share name, show name and season
|
||||
|
@ -58,34 +58,35 @@ class pyTivoNotifier:
|
|||
# There might be better ways to arrive at the values, but this is the best I have been able to
|
||||
# come up with.
|
||||
#
|
||||
|
||||
|
||||
|
||||
|
||||
# Calculated values
|
||||
|
||||
|
||||
showPath = ep_obj.show.location
|
||||
showName = ep_obj.show.name
|
||||
rootShowAndSeason = ek.ek(os.path.dirname, ep_obj.location)
|
||||
rootShowAndSeason = ek.ek(os.path.dirname, ep_obj.location)
|
||||
absPath = ep_obj.location
|
||||
|
||||
|
||||
# Some show names have colons in them which are illegal in a path location, so strip them out.
|
||||
# (Are there other characters?)
|
||||
showName = showName.replace(":","")
|
||||
|
||||
showName = showName.replace(":", "")
|
||||
|
||||
root = showPath.replace(showName, "")
|
||||
showAndSeason = rootShowAndSeason.replace(root, "")
|
||||
|
||||
|
||||
container = shareName + "/" + showAndSeason
|
||||
file = "/" + absPath.replace(root, "")
|
||||
|
||||
|
||||
# Finally create the url and make request
|
||||
requestUrl = "http://" + host + "/TiVoConnect?" + urlencode( {'Command':'Push', 'Container':container, 'File':file, 'tsn':tsn} )
|
||||
|
||||
requestUrl = "http://" + host + "/TiVoConnect?" + urlencode(
|
||||
{'Command': 'Push', 'Container': container, 'File': file, 'tsn': tsn})
|
||||
|
||||
logger.log(u"pyTivo notification: Requesting " + requestUrl)
|
||||
|
||||
request = Request( requestUrl )
|
||||
|
||||
request = Request(requestUrl)
|
||||
|
||||
try:
|
||||
response = urlopen(request) #@UnusedVariable
|
||||
response = urlopen(request) #@UnusedVariable
|
||||
except URLError, e:
|
||||
if hasattr(e, 'reason'):
|
||||
logger.log(u"pyTivo notification: Error, failed to reach a server")
|
||||
|
@ -99,4 +100,5 @@ class pyTivoNotifier:
|
|||
logger.log(u"pyTivo notification: Successfully requested transfer of file")
|
||||
return True
|
||||
|
||||
|
||||
notifier = pyTivoNotifier
|
||||
|
|
|
@ -27,14 +27,14 @@ from sickbeard import logger
|
|||
from sickbeard import encodingKludge as ek
|
||||
from sickbeard.exceptions import ex
|
||||
|
||||
class synoIndexNotifier:
|
||||
|
||||
class synoIndexNotifier:
|
||||
def notify_snatch(self, ep_name):
|
||||
pass
|
||||
|
||||
def notify_download(self, ep_name):
|
||||
pass
|
||||
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
pass
|
||||
|
||||
|
@ -46,15 +46,17 @@ class synoIndexNotifier:
|
|||
|
||||
def moveObject(self, old_path, new_path):
|
||||
if sickbeard.USE_SYNOINDEX:
|
||||
synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path), ek.ek(os.path.abspath, old_path)]
|
||||
logger.log(u"Executing command "+str(synoindex_cmd))
|
||||
logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
|
||||
synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path),
|
||||
ek.ek(os.path.abspath, old_path)]
|
||||
logger.log(u"Executing command " + str(synoindex_cmd))
|
||||
logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
|
||||
try:
|
||||
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() #@UnusedVariable
|
||||
logger.log(u"Script result: "+str(out), logger.DEBUG)
|
||||
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() #@UnusedVariable
|
||||
logger.log(u"Script result: " + str(out), logger.DEBUG)
|
||||
except OSError, e:
|
||||
logger.log(u"Unable to run synoindex: "+ex(e))
|
||||
logger.log(u"Unable to run synoindex: " + ex(e))
|
||||
|
||||
def deleteFolder(self, cur_path):
|
||||
self.makeObject('-D', cur_path)
|
||||
|
@ -71,13 +73,15 @@ class synoIndexNotifier:
|
|||
def makeObject(self, cmd_arg, cur_path):
|
||||
if sickbeard.USE_SYNOINDEX:
|
||||
synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, ek.ek(os.path.abspath, cur_path)]
|
||||
logger.log(u"Executing command "+str(synoindex_cmd))
|
||||
logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
|
||||
logger.log(u"Executing command " + str(synoindex_cmd))
|
||||
logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
|
||||
try:
|
||||
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() #@UnusedVariable
|
||||
logger.log(u"Script result: "+str(out), logger.DEBUG)
|
||||
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() #@UnusedVariable
|
||||
logger.log(u"Script result: " + str(out), logger.DEBUG)
|
||||
except OSError, e:
|
||||
logger.log(u"Unable to run synoindex: "+ex(e))
|
||||
logger.log(u"Unable to run synoindex: " + ex(e))
|
||||
|
||||
|
||||
notifier = synoIndexNotifier
|
||||
|
|
|
@ -27,8 +27,8 @@ from sickbeard import encodingKludge as ek
|
|||
from sickbeard.exceptions import ex
|
||||
from sickbeard import common
|
||||
|
||||
class synologyNotifier:
|
||||
|
||||
class synologyNotifier:
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH:
|
||||
self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_SNATCH])
|
||||
|
@ -36,20 +36,22 @@ class synologyNotifier:
|
|||
def notify_download(self, ep_name):
|
||||
if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD:
|
||||
self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_DOWNLOAD])
|
||||
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._send_synologyNotifier(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])
|
||||
|
||||
def _send_synologyNotifier(self, message, title):
|
||||
synodsmnotify_cmd = ["/usr/syno/bin/synodsmnotify", "@administrators", title, message]
|
||||
logger.log(u"Executing command "+str(synodsmnotify_cmd))
|
||||
logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG)
|
||||
logger.log(u"Executing command " + str(synodsmnotify_cmd))
|
||||
logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG)
|
||||
try:
|
||||
p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() #@UnusedVariable
|
||||
logger.log(u"Script result: "+str(out), logger.DEBUG)
|
||||
p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() #@UnusedVariable
|
||||
logger.log(u"Script result: " + str(out), logger.DEBUG)
|
||||
except OSError, e:
|
||||
logger.log(u"Unable to run synodsmnotify: "+ex(e))
|
||||
logger.log(u"Unable to run synodsmnotify: " + ex(e))
|
||||
|
||||
|
||||
notifier = synologyNotifier
|
||||
|
|
|
@ -20,6 +20,7 @@ import sickbeard
|
|||
from sickbeard import logger
|
||||
from lib.trakt import *
|
||||
|
||||
|
||||
class TraktNotifier:
|
||||
"""
|
||||
A "notifier" for trakt.tv which keeps track of what has and hasn't been added to your library.
|
||||
|
@ -30,7 +31,7 @@ class TraktNotifier:
|
|||
|
||||
def notify_download(self, ep_name):
|
||||
pass
|
||||
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
pass
|
||||
|
||||
|
@ -40,20 +41,20 @@ class TraktNotifier:
|
|||
|
||||
ep_obj: The TVEpisode object to add to trakt
|
||||
"""
|
||||
|
||||
|
||||
if sickbeard.USE_TRAKT:
|
||||
|
||||
|
||||
# URL parameters
|
||||
data = {
|
||||
'indexer_id': ep_obj.show.indexerid,
|
||||
'title': ep_obj.show.name,
|
||||
'year': ep_obj.show.startyear,
|
||||
'episodes': [ {
|
||||
'season': ep_obj.season,
|
||||
'episode': ep_obj.episode
|
||||
} ]
|
||||
}
|
||||
|
||||
'episodes': [{
|
||||
'season': ep_obj.season,
|
||||
'episode': ep_obj.episode
|
||||
}]
|
||||
}
|
||||
|
||||
if data is not None:
|
||||
TraktCall("show/episode/library/%API%", self._api(), self._username(), self._password(), data)
|
||||
if sickbeard.TRAKT_REMOVE_WATCHLIST:
|
||||
|
@ -70,7 +71,7 @@ class TraktNotifier:
|
|||
|
||||
Returns: True if the request succeeded, False otherwise
|
||||
"""
|
||||
|
||||
|
||||
data = TraktCall("account/test/%API%", api, username, password, {})
|
||||
if data["status"] == "success":
|
||||
return True
|
||||
|
|
|
@ -23,84 +23,84 @@ from sickbeard.exceptions import ex
|
|||
|
||||
# parse_qsl moved to urlparse module in v2.6
|
||||
try:
|
||||
from urlparse import parse_qsl #@UnusedImport
|
||||
from urlparse import parse_qsl #@UnusedImport
|
||||
except:
|
||||
from cgi import parse_qsl #@Reimport
|
||||
from cgi import parse_qsl #@Reimport
|
||||
|
||||
import lib.oauth2 as oauth
|
||||
import lib.pythontwitter as twitter
|
||||
|
||||
class TwitterNotifier:
|
||||
|
||||
class TwitterNotifier:
|
||||
consumer_key = "vHHtcB6WzpWDG6KYlBMr8g"
|
||||
consumer_secret = "zMqq5CB3f8cWKiRO2KzWPTlBanYmV0VYxSXZ0Pxds0E"
|
||||
|
||||
|
||||
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
|
||||
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
|
||||
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
|
||||
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
|
||||
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
|
||||
|
||||
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.TWITTER_NOTIFY_ONSNATCH:
|
||||
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH]+': '+ep_name)
|
||||
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH] + ': ' + ep_name)
|
||||
|
||||
def notify_download(self, ep_name):
|
||||
if sickbeard.TWITTER_NOTIFY_ONDOWNLOAD:
|
||||
self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD]+': '+ep_name)
|
||||
|
||||
self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD] + ': ' + ep_name)
|
||||
|
||||
def notify_subtitle_download(self, ep_name, lang):
|
||||
if sickbeard.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD:
|
||||
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]+' '+ep_name + ": " + lang)
|
||||
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD] + ' ' + ep_name + ": " + lang)
|
||||
|
||||
def test_notify(self):
|
||||
return self._notifyTwitter("This is a test notification from Sick Beard", force=True)
|
||||
|
||||
def _get_authorization(self):
|
||||
|
||||
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
|
||||
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
|
||||
oauth_client = oauth.Client(oauth_consumer)
|
||||
|
||||
|
||||
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
|
||||
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
|
||||
oauth_client = oauth.Client(oauth_consumer)
|
||||
|
||||
logger.log('Requesting temp token from Twitter')
|
||||
|
||||
|
||||
resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET')
|
||||
|
||||
|
||||
if resp['status'] != '200':
|
||||
logger.log('Invalid respond from Twitter requesting temp token: %s' % resp['status'])
|
||||
else:
|
||||
request_token = dict(parse_qsl(content))
|
||||
|
||||
|
||||
sickbeard.TWITTER_USERNAME = request_token['oauth_token']
|
||||
sickbeard.TWITTER_PASSWORD = request_token['oauth_token_secret']
|
||||
|
||||
return self.AUTHORIZATION_URL+"?oauth_token="+ request_token['oauth_token']
|
||||
|
||||
|
||||
return self.AUTHORIZATION_URL + "?oauth_token=" + request_token['oauth_token']
|
||||
|
||||
def _get_credentials(self, key):
|
||||
request_token = {}
|
||||
|
||||
|
||||
request_token['oauth_token'] = sickbeard.TWITTER_USERNAME
|
||||
request_token['oauth_token_secret'] = sickbeard.TWITTER_PASSWORD
|
||||
request_token['oauth_callback_confirmed'] = 'true'
|
||||
|
||||
|
||||
token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
|
||||
token.set_verifier(key)
|
||||
|
||||
logger.log('Generating and signing request for an access token using key '+key)
|
||||
|
||||
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
|
||||
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
|
||||
logger.log('oauth_consumer: '+str(oauth_consumer))
|
||||
oauth_client = oauth.Client(oauth_consumer, token)
|
||||
logger.log('oauth_client: '+str(oauth_client))
|
||||
|
||||
logger.log('Generating and signing request for an access token using key ' + key)
|
||||
|
||||
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
|
||||
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
|
||||
logger.log('oauth_consumer: ' + str(oauth_consumer))
|
||||
oauth_client = oauth.Client(oauth_consumer, token)
|
||||
logger.log('oauth_client: ' + str(oauth_client))
|
||||
resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
|
||||
logger.log('resp, content: '+str(resp)+','+str(content))
|
||||
|
||||
access_token = dict(parse_qsl(content))
|
||||
logger.log('access_token: '+str(access_token))
|
||||
|
||||
logger.log('resp[status] = '+str(resp['status']))
|
||||
logger.log('resp, content: ' + str(resp) + ',' + str(content))
|
||||
|
||||
access_token = dict(parse_qsl(content))
|
||||
logger.log('access_token: ' + str(access_token))
|
||||
|
||||
logger.log('resp[status] = ' + str(resp['status']))
|
||||
if resp['status'] != '200':
|
||||
logger.log('The request for a token with did not succeed: '+str(resp['status']), logger.ERROR)
|
||||
logger.log('The request for a token with did not succeed: ' + str(resp['status']), logger.ERROR)
|
||||
return False
|
||||
else:
|
||||
logger.log('Your Twitter Access Token key: %s' % access_token['oauth_token'])
|
||||
|
@ -108,33 +108,34 @@ class TwitterNotifier:
|
|||
sickbeard.TWITTER_USERNAME = access_token['oauth_token']
|
||||
sickbeard.TWITTER_PASSWORD = access_token['oauth_token_secret']
|
||||
return True
|
||||
|
||||
|
||||
|
||||
|
||||
def _send_tweet(self, message=None):
|
||||
|
||||
username=self.consumer_key
|
||||
password=self.consumer_secret
|
||||
access_token_key=sickbeard.TWITTER_USERNAME
|
||||
access_token_secret=sickbeard.TWITTER_PASSWORD
|
||||
|
||||
logger.log(u"Sending tweet: "+message)
|
||||
|
||||
|
||||
username = self.consumer_key
|
||||
password = self.consumer_secret
|
||||
access_token_key = sickbeard.TWITTER_USERNAME
|
||||
access_token_secret = sickbeard.TWITTER_PASSWORD
|
||||
|
||||
logger.log(u"Sending tweet: " + message)
|
||||
|
||||
api = twitter.Api(username, password, access_token_key, access_token_secret)
|
||||
|
||||
|
||||
try:
|
||||
api.PostUpdate(message.encode('utf8'))
|
||||
except Exception, e:
|
||||
logger.log(u"Error Sending Tweet: "+ex(e), logger.ERROR)
|
||||
logger.log(u"Error Sending Tweet: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _notifyTwitter(self, message='', force=False):
|
||||
prefix = sickbeard.TWITTER_PREFIX
|
||||
|
||||
|
||||
if not sickbeard.USE_TWITTER and not force:
|
||||
return False
|
||||
|
||||
return self._send_tweet(prefix+": "+message)
|
||||
|
||||
return self._send_tweet(prefix + ": " + message)
|
||||
|
||||
|
||||
notifier = TwitterNotifier
|
|
@ -41,7 +41,6 @@ except ImportError:
|
|||
|
||||
|
||||
class XBMCNotifier:
|
||||
|
||||
sb_logo_url = 'http://www.sickbeard.com/xbmc-notify.png'
|
||||
|
||||
def _get_xbmc_version(self, host, username, password):
|
||||
|
@ -133,18 +132,22 @@ class XBMCNotifier:
|
|||
if xbmcapi:
|
||||
if (xbmcapi <= 4):
|
||||
logger.log(u"Detected XBMC version <= 11, using XBMC HTTP API", logger.DEBUG)
|
||||
command = {'command': 'ExecBuiltIn', 'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
|
||||
command = {'command': 'ExecBuiltIn',
|
||||
'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode(
|
||||
"utf-8") + ')'}
|
||||
notifyResult = self._send_to_xbmc(command, curHost, username, password)
|
||||
if notifyResult:
|
||||
result += curHost + ':' + str(notifyResult)
|
||||
else:
|
||||
logger.log(u"Detected XBMC version >= 12, using XBMC JSON API", logger.DEBUG)
|
||||
command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url)
|
||||
command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (
|
||||
title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url)
|
||||
notifyResult = self._send_to_xbmc_json(command, curHost, username, password)
|
||||
if notifyResult:
|
||||
result += curHost + ':' + notifyResult["result"].decode(sickbeard.SYS_ENCODING)
|
||||
else:
|
||||
logger.log(u"Failed to detect XBMC version for '" + curHost + "', check configuration and try again.", logger.ERROR)
|
||||
logger.log(u"Failed to detect XBMC version for '" + curHost + "', check configuration and try again.",
|
||||
logger.ERROR)
|
||||
result += curHost + ':False'
|
||||
|
||||
return result
|
||||
|
@ -182,14 +185,15 @@ class XBMCNotifier:
|
|||
else:
|
||||
return True
|
||||
else:
|
||||
logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.", logger.DEBUG)
|
||||
logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.",
|
||||
logger.DEBUG)
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
##############################################################################
|
||||
# Legacy HTTP API (pre XBMC 12) methods
|
||||
##############################################################################
|
||||
##############################################################################
|
||||
# Legacy HTTP API (pre XBMC 12) methods
|
||||
##############################################################################
|
||||
|
||||
def _send_to_xbmc(self, command, host=None, username=None, password=None):
|
||||
"""Handles communication to XBMC servers via HTTP API
|
||||
|
@ -242,7 +246,8 @@ class XBMCNotifier:
|
|||
return result
|
||||
|
||||
except (urllib2.URLError, IOError), e:
|
||||
logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e), logger.WARNING)
|
||||
logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e),
|
||||
logger.WARNING)
|
||||
return False
|
||||
|
||||
def _update_library(self, host=None, showName=None):
|
||||
|
@ -271,11 +276,12 @@ class XBMCNotifier:
|
|||
logger.log(u"Updating library in XBMC via HTTP method for show " + showName, logger.DEBUG)
|
||||
|
||||
pathSql = 'select path.strPath from path, tvshow, tvshowlinkpath where ' \
|
||||
'tvshow.c00 = "%s" and tvshowlinkpath.idShow = tvshow.idShow ' \
|
||||
'and tvshowlinkpath.idPath = path.idPath' % (showName)
|
||||
'tvshow.c00 = "%s" and tvshowlinkpath.idShow = tvshow.idShow ' \
|
||||
'and tvshowlinkpath.idPath = path.idPath' % (showName)
|
||||
|
||||
# use this to get xml back for the path lookups
|
||||
xmlCommand = {'command': 'SetResponseFormat(webheader;false;webfooter;false;header;<xml>;footer;</xml>;opentag;<tag>;closetag;</tag>;closefinaltag;false)'}
|
||||
xmlCommand = {
|
||||
'command': 'SetResponseFormat(webheader;false;webfooter;false;header;<xml>;footer;</xml>;opentag;<tag>;closetag;</tag>;closefinaltag;false)'}
|
||||
# sql used to grab path(s)
|
||||
sqlCommand = {'command': 'QueryVideoDatabase(%s)' % (pathSql)}
|
||||
# set output back to default
|
||||
|
@ -313,7 +319,8 @@ class XBMCNotifier:
|
|||
updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video, %s)' % (unEncPath)}
|
||||
request = self._send_to_xbmc(updateCommand, host)
|
||||
if not request:
|
||||
logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath, logger.ERROR)
|
||||
logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath,
|
||||
logger.ERROR)
|
||||
return False
|
||||
# sleep for a few seconds just to be sure xbmc has a chance to finish each directory
|
||||
if len(paths) > 1:
|
||||
|
@ -330,9 +337,9 @@ class XBMCNotifier:
|
|||
|
||||
return True
|
||||
|
||||
##############################################################################
|
||||
# JSON-RPC API (XBMC 12+) methods
|
||||
##############################################################################
|
||||
##############################################################################
|
||||
# JSON-RPC API (XBMC 12+) methods
|
||||
##############################################################################
|
||||
|
||||
def _send_to_xbmc_json(self, command, host=None, username=None, password=None):
|
||||
"""Handles communication to XBMC servers via JSONRPC
|
||||
|
@ -377,7 +384,8 @@ class XBMCNotifier:
|
|||
try:
|
||||
response = urllib2.urlopen(req)
|
||||
except urllib2.URLError, e:
|
||||
logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e), logger.WARNING)
|
||||
logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e),
|
||||
logger.WARNING)
|
||||
return False
|
||||
|
||||
# parse the json result
|
||||
|
@ -385,13 +393,14 @@ class XBMCNotifier:
|
|||
result = json.load(response)
|
||||
response.close()
|
||||
logger.log(u"XBMC JSON response: " + str(result), logger.DEBUG)
|
||||
return result # need to return response for parsing
|
||||
return result # need to return response for parsing
|
||||
except ValueError, e:
|
||||
logger.log(u"Unable to decode JSON: " + response, logger.WARNING)
|
||||
return False
|
||||
|
||||
except IOError, e:
|
||||
logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e), logger.WARNING)
|
||||
logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e),
|
||||
logger.WARNING)
|
||||
return False
|
||||
|
||||
def _update_library_json(self, host=None, showName=None):
|
||||
|
@ -430,7 +439,7 @@ class XBMCNotifier:
|
|||
for show in shows:
|
||||
if (show["label"] == showName):
|
||||
tvshowid = show["tvshowid"]
|
||||
break # exit out of loop otherwise the label and showname will not match up
|
||||
break # exit out of loop otherwise the label and showname will not match up
|
||||
|
||||
# this can be big, so free some memory
|
||||
del shows
|
||||
|
@ -441,27 +450,34 @@ class XBMCNotifier:
|
|||
return False
|
||||
|
||||
# lookup tv-show path
|
||||
pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % (tvshowid)
|
||||
pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % (
|
||||
tvshowid)
|
||||
pathResponse = self._send_to_xbmc_json(pathCommand, host)
|
||||
|
||||
path = pathResponse["result"]["tvshowdetails"]["file"]
|
||||
logger.log(u"Received Show: " + show["label"] + " with ID: " + str(tvshowid) + " Path: " + path, logger.DEBUG)
|
||||
logger.log(u"Received Show: " + show["label"] + " with ID: " + str(tvshowid) + " Path: " + path,
|
||||
logger.DEBUG)
|
||||
|
||||
if (len(path) < 1):
|
||||
logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host, logger.WARNING)
|
||||
logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host,
|
||||
logger.WARNING)
|
||||
return False
|
||||
|
||||
logger.log(u"XBMC Updating " + showName + " on " + host + " at " + path, logger.DEBUG)
|
||||
updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (json.dumps(path))
|
||||
updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (
|
||||
json.dumps(path))
|
||||
request = self._send_to_xbmc_json(updateCommand, host)
|
||||
if not request:
|
||||
logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path, logger.ERROR)
|
||||
logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path,
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
# catch if there was an error in the returned request
|
||||
for r in request:
|
||||
if 'error' in r:
|
||||
logger.log(u"Error while attempting to update show directory for " + showName + " on " + host + " at " + path, logger.ERROR)
|
||||
logger.log(
|
||||
u"Error while attempting to update show directory for " + showName + " on " + host + " at " + path,
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
# do a full update if requested
|
||||
|
@ -476,9 +492,9 @@ class XBMCNotifier:
|
|||
|
||||
return True
|
||||
|
||||
##############################################################################
|
||||
# Public functions which will call the JSON or Legacy HTTP API methods
|
||||
##############################################################################
|
||||
##############################################################################
|
||||
# Public functions which will call the JSON or Legacy HTTP API methods
|
||||
##############################################################################
|
||||
|
||||
def notify_snatch(self, ep_name):
|
||||
if sickbeard.XBMC_NOTIFY_ONSNATCH:
|
||||
|
@ -493,7 +509,8 @@ class XBMCNotifier:
|
|||
self._notify_xbmc(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])
|
||||
|
||||
def test_notify(self, host, username, password):
|
||||
return self._notify_xbmc("Testing XBMC notifications from Sick Beard", "Test Notification", host, username, password, force=True)
|
||||
return self._notify_xbmc("Testing XBMC notifications from Sick Beard", "Test Notification", host, username,
|
||||
password, force=True)
|
||||
|
||||
def update_library(self, showName=None):
|
||||
"""Public wrapper for the update library functions to branch the logic for JSON-RPC or legacy HTTP API
|
||||
|
@ -521,10 +538,12 @@ class XBMCNotifier:
|
|||
for host in [x.strip() for x in sickbeard.XBMC_HOST.split(",")]:
|
||||
if self._send_update_library(host, showName):
|
||||
if sickbeard.XBMC_UPDATE_ONLYFIRST:
|
||||
logger.log(u"Successfully updated '" + host + "', stopped sending update library commands.", logger.DEBUG)
|
||||
logger.log(u"Successfully updated '" + host + "', stopped sending update library commands.",
|
||||
logger.DEBUG)
|
||||
return True
|
||||
else:
|
||||
logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.", logger.ERROR)
|
||||
logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.",
|
||||
logger.ERROR)
|
||||
result = result + 1
|
||||
|
||||
# needed for the 'update xbmc' submenu command
|
||||
|
@ -534,4 +553,5 @@ class XBMCNotifier:
|
|||
else:
|
||||
return False
|
||||
|
||||
|
||||
notifier = XBMCNotifier
|
||||
|
|
|
@ -32,7 +32,6 @@ from sickbeard.exceptions import ex
|
|||
|
||||
|
||||
def getSeasonNZBs(name, urlData, season):
|
||||
|
||||
try:
|
||||
showXML = etree.ElementTree(etree.XML(urlData))
|
||||
except SyntaxError:
|
||||
|
@ -78,7 +77,6 @@ def getSeasonNZBs(name, urlData, season):
|
|||
|
||||
|
||||
def createNZBString(fileElements, xmlns):
|
||||
|
||||
rootElement = etree.Element("nzb")
|
||||
if xmlns:
|
||||
rootElement.set("xmlns", xmlns)
|
||||
|
@ -90,7 +88,6 @@ def createNZBString(fileElements, xmlns):
|
|||
|
||||
|
||||
def saveNZB(nzbName, nzbString):
|
||||
|
||||
try:
|
||||
with ek.ek(open, nzbName + ".nzb", 'w') as nzb_fh:
|
||||
nzb_fh.write(nzbString)
|
||||
|
@ -108,7 +105,6 @@ def stripNS(element, ns):
|
|||
|
||||
|
||||
def splitResult(result):
|
||||
|
||||
urlData = helpers.getURL(result.url)
|
||||
|
||||
if urlData is None:
|
||||
|
@ -143,17 +139,23 @@ def splitResult(result):
|
|||
return False
|
||||
|
||||
# make sure the result is sane
|
||||
if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
|
||||
logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it", logger.WARNING)
|
||||
if (parse_result.season_number != None and parse_result.season_number != season) or (
|
||||
parse_result.season_number == None and season != 1):
|
||||
logger.log(
|
||||
u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it",
|
||||
logger.WARNING)
|
||||
continue
|
||||
elif len(parse_result.episode_numbers) == 0:
|
||||
logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it", logger.WARNING)
|
||||
logger.log(
|
||||
u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it",
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
wantEp = True
|
||||
for epNo in parse_result.episode_numbers:
|
||||
if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
|
||||
logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " + Quality.qualityStrings[result.quality], logger.DEBUG)
|
||||
logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " +
|
||||
Quality.qualityStrings[result.quality], logger.DEBUG)
|
||||
wantEp = False
|
||||
break
|
||||
if not wantEp:
|
||||
|
|
|
@ -33,8 +33,8 @@ from sickbeard import logger, helpers
|
|||
|
||||
from common import Quality
|
||||
|
||||
def sendNZB(nzb, proper = False):
|
||||
|
||||
def sendNZB(nzb, proper=False):
|
||||
addToTop = False
|
||||
nzbgetprio = 0
|
||||
nzbgetXMLrpc = "http://%(username)s:%(password)s@%(host)s/xmlrpc"
|
||||
|
@ -43,17 +43,21 @@ def sendNZB(nzb, proper = False):
|
|||
logger.log(u"No NZBget host found in configuration. Please configure it.", logger.ERROR)
|
||||
return False
|
||||
|
||||
url = nzbgetXMLrpc % {"host": sickbeard.NZBGET_HOST, "username": sickbeard.NZBGET_USERNAME, "password": sickbeard.NZBGET_PASSWORD}
|
||||
url = nzbgetXMLrpc % {"host": sickbeard.NZBGET_HOST, "username": sickbeard.NZBGET_USERNAME,
|
||||
"password": sickbeard.NZBGET_PASSWORD}
|
||||
|
||||
nzbGetRPC = xmlrpclib.ServerProxy(url)
|
||||
try:
|
||||
if nzbGetRPC.writelog("INFO", "Sickbeard connected to drop of %s any moment now." % (nzb.name + ".nzb")):
|
||||
logger.log(u"Successful connected to NZBget", logger.DEBUG)
|
||||
else:
|
||||
logger.log(u"Successful connected to NZBget, but unable to send a message" % (nzb.name + ".nzb"), logger.ERROR)
|
||||
logger.log(u"Successful connected to NZBget, but unable to send a message" % (nzb.name + ".nzb"),
|
||||
logger.ERROR)
|
||||
|
||||
except httplib.socket.error:
|
||||
logger.log(u"Please check your NZBget host and port (if it is running). NZBget is not responding to this combination", logger.ERROR)
|
||||
logger.log(
|
||||
u"Please check your NZBget host and port (if it is running). NZBget is not responding to this combination",
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
except xmlrpclib.ProtocolError, e:
|
||||
|
@ -102,10 +106,12 @@ def sendNZB(nzb, proper = False):
|
|||
if nzbget_version == 0:
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64)
|
||||
elif nzbget_version >= 12:
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False, nzbcontent64, False, dupekey, dupescore, "score")
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False,
|
||||
nzbcontent64, False, dupekey, dupescore, "score")
|
||||
else:
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False, nzbcontent64)
|
||||
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False,
|
||||
nzbcontent64)
|
||||
|
||||
if nzbget_result:
|
||||
logger.log(u"NZB sent to NZBget successfully", logger.DEBUG)
|
||||
return True
|
||||
|
|
|
@ -46,8 +46,6 @@ from sickbeard.exceptions import ex
|
|||
|
||||
from sickbeard.name_parser.parser import NameParser, InvalidNameException
|
||||
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
from common import indexerStrings
|
||||
|
||||
class PostProcessor(object):
|
||||
"""
|
||||
|
@ -59,7 +57,7 @@ class PostProcessor(object):
|
|||
EXISTS_SMALLER = 3
|
||||
DOESNT_EXIST = 4
|
||||
|
||||
IGNORED_FILESTRINGS = [ "/.AppleDouble/", ".DS_Store" ]
|
||||
IGNORED_FILESTRINGS = ["/.AppleDouble/", ".DS_Store"]
|
||||
|
||||
NZB_NAME = 1
|
||||
FOLDER_NAME = 2
|
||||
|
@ -149,7 +147,8 @@ class PostProcessor(object):
|
|||
return PostProcessor.EXISTS_SMALLER
|
||||
|
||||
else:
|
||||
self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it", logger.DEBUG)
|
||||
self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it",
|
||||
logger.DEBUG)
|
||||
return PostProcessor.DOESNT_EXIST
|
||||
|
||||
def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False):
|
||||
|
@ -185,7 +184,7 @@ class PostProcessor(object):
|
|||
if associated_file_path == file_path:
|
||||
continue
|
||||
# only list it if the only non-shared part is the extension or if it is a subtitle
|
||||
if subtitles_only and not associated_file_path[len(associated_file_path)-3:] in common.subtitleExtensions:
|
||||
if subtitles_only and not associated_file_path[len(associated_file_path) - 3:] in common.subtitleExtensions:
|
||||
continue
|
||||
|
||||
#Exclude .rar files from associated list
|
||||
|
@ -227,7 +226,7 @@ class PostProcessor(object):
|
|||
# File is read-only, so make it writeable
|
||||
self._log('Read only mode on file ' + cur_file + ' Will try to make it writeable', logger.DEBUG)
|
||||
try:
|
||||
ek.ek(os.chmod,cur_file,stat.S_IWRITE)
|
||||
ek.ek(os.chmod, cur_file, stat.S_IWRITE)
|
||||
except:
|
||||
self._log(u'Cannot change permissions of ' + cur_file, logger.WARNING)
|
||||
|
||||
|
@ -235,7 +234,8 @@ class PostProcessor(object):
|
|||
# do the library update for synoindex
|
||||
notifiers.synoindex_notifier.deleteFile(cur_file)
|
||||
|
||||
def _combined_file_operation (self, file_path, new_path, new_base_name, associated_files=False, action=None, subtitles=False):
|
||||
def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None,
|
||||
subtitles=False):
|
||||
"""
|
||||
Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
|
||||
and optionally move associated files too.
|
||||
|
@ -321,7 +321,8 @@ class PostProcessor(object):
|
|||
self._log("Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e), logger.ERROR)
|
||||
raise e
|
||||
|
||||
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move, subtitles=subtitles)
|
||||
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move,
|
||||
subtitles=subtitles)
|
||||
|
||||
def _copy(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
|
||||
"""
|
||||
|
@ -331,7 +332,7 @@ class PostProcessor(object):
|
|||
associated_files: Boolean, whether we should copy similarly-named files too
|
||||
"""
|
||||
|
||||
def _int_copy (cur_file_path, new_file_path):
|
||||
def _int_copy(cur_file_path, new_file_path):
|
||||
|
||||
self._log(u"Copying file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
|
||||
try:
|
||||
|
@ -341,7 +342,8 @@ class PostProcessor(object):
|
|||
logger.log("Unable to copy file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
|
||||
raise e
|
||||
|
||||
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy, subtitles=subtitles)
|
||||
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy,
|
||||
subtitles=subtitles)
|
||||
|
||||
|
||||
def _hardlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
|
||||
|
@ -361,6 +363,7 @@ class PostProcessor(object):
|
|||
except (IOError, OSError), e:
|
||||
self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
|
||||
raise e
|
||||
|
||||
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link)
|
||||
|
||||
def _moveAndSymlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
|
||||
|
@ -380,7 +383,9 @@ class PostProcessor(object):
|
|||
except (IOError, OSError), e:
|
||||
self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
|
||||
raise e
|
||||
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move_and_sym_link)
|
||||
|
||||
self._combined_file_operation(file_path, new_path, new_base_name, associated_files,
|
||||
action=_int_move_and_sym_link)
|
||||
|
||||
def _history_lookup(self):
|
||||
"""
|
||||
|
@ -476,7 +481,8 @@ class PostProcessor(object):
|
|||
|
||||
# remember whether it's a proper
|
||||
if parse_result.extra_info:
|
||||
self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) != None
|
||||
self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,
|
||||
re.I) != None
|
||||
|
||||
# if the result is complete then remember that for later
|
||||
if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
|
||||
|
@ -488,9 +494,11 @@ class PostProcessor(object):
|
|||
elif test_name == self.file_name:
|
||||
self.good_results[self.FILE_NAME] = True
|
||||
else:
|
||||
logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
|
||||
logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(
|
||||
self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
|
||||
else:
|
||||
logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name", logger.DEBUG)
|
||||
logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name",
|
||||
logger.DEBUG)
|
||||
logger.log("Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
|
||||
logger.log("Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
|
||||
logger.log("Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
|
||||
|
@ -507,7 +515,7 @@ class PostProcessor(object):
|
|||
|
||||
# see if we can find the name directly in the DB, if so use it
|
||||
for cur_name in name_list:
|
||||
self._log(u"Looking up " + cur_name +u" in the DB", logger.DEBUG)
|
||||
self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
|
||||
db_result = helpers.searchDBForShow(cur_name)
|
||||
if db_result:
|
||||
self._log(u"Lookup successful, using " + db_result[0] + " id " + str(db_result[1]), logger.DEBUG)
|
||||
|
@ -521,11 +529,12 @@ class PostProcessor(object):
|
|||
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
self._log(u"Looking up name " + cur_name + u" on " + self.indexer + "", logger.DEBUG)
|
||||
self._log(u"Looking up name " + cur_name + u" on " + sickbeard.indexerApi(self.indexer).name + "",
|
||||
logger.DEBUG)
|
||||
showObj = t[cur_name]
|
||||
except (indexer_exceptions.indexer_exception, IOError):
|
||||
except (sickbeard.indexer_exception, IOError):
|
||||
# if none found, search on all languages
|
||||
try:
|
||||
lINDEXER_API_PARMS = {'indexer': self.indexer}
|
||||
|
@ -533,18 +542,21 @@ class PostProcessor(object):
|
|||
lINDEXER_API_PARMS['search_all_languages'] = True
|
||||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
self._log(u"Looking up name " + cur_name + u" in all languages on " + self.indexer + "", logger.DEBUG)
|
||||
self._log(u"Looking up name " + cur_name + u" in all languages on " + sickbeard.indexerApi(
|
||||
self.indexer).name + "", logger.DEBUG)
|
||||
showObj = t[cur_name]
|
||||
except (indexer_exceptions.indexer_exception, IOError):
|
||||
except (sickbeard.indexer_exception, IOError):
|
||||
pass
|
||||
|
||||
continue
|
||||
except (IOError):
|
||||
continue
|
||||
|
||||
self._log(u"Lookup successful, using " + self.indexer + " id " + str(showObj["id"]), logger.DEBUG)
|
||||
self._log(
|
||||
u"Lookup successful, using " + sickbeard.indexerApi(self.indexer).name + " id " + str(showObj["id"]),
|
||||
logger.DEBUG)
|
||||
_finalize(parse_result)
|
||||
return (int(showObj["id"]), season, episodes)
|
||||
|
||||
|
@ -559,7 +571,7 @@ class PostProcessor(object):
|
|||
indexer_id = season = None
|
||||
episodes = []
|
||||
|
||||
# try to look up the nzb in history
|
||||
# try to look up the nzb in history
|
||||
attempt_list = [self._history_lookup,
|
||||
|
||||
# try to analyze the nzb name
|
||||
|
@ -577,7 +589,7 @@ class PostProcessor(object):
|
|||
# try to analyze the dir + file name together as one name
|
||||
lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)
|
||||
|
||||
]
|
||||
]
|
||||
|
||||
# attempt every possible method to get our info
|
||||
for cur_attempt in attempt_list:
|
||||
|
@ -598,18 +610,19 @@ class PostProcessor(object):
|
|||
|
||||
# for air-by-date shows we need to look up the season/episode from tvdb
|
||||
if season == -1 and indexer_id and episodes:
|
||||
self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode", logger.DEBUG)
|
||||
self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode",
|
||||
logger.DEBUG)
|
||||
|
||||
# try to get language set for this show
|
||||
indexer_lang = None
|
||||
try:
|
||||
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
|
||||
if(showObj != None):
|
||||
if (showObj != None):
|
||||
# set the language of the show
|
||||
indexer_lang = showObj.lang
|
||||
self.indexer = showObj.indexer
|
||||
except exceptions.MultipleShowObjectsException:
|
||||
raise #TODO: later I'll just log this, for now I want to know about it ASAP
|
||||
raise #TODO: later I'll just log this, for now I want to know about it ASAP
|
||||
|
||||
try:
|
||||
lINDEXER_API_PARMS = {'indexer': self.indexer}
|
||||
|
@ -617,7 +630,7 @@ class PostProcessor(object):
|
|||
if indexer_lang and not indexer_lang == 'en':
|
||||
lINDEXER_API_PARMS = {'language': indexer_lang}
|
||||
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
epObj = t[indexer_id].airedOn(episodes[0])[0]
|
||||
|
||||
|
@ -625,22 +638,28 @@ class PostProcessor(object):
|
|||
episodes = [int(epObj["episodenumber"])]
|
||||
|
||||
self._log(u"Got season " + str(season) + " episodes " + str(episodes), logger.DEBUG)
|
||||
except indexer_exceptions.indexer_episodenotfound, e:
|
||||
self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(indexer_id) + u", skipping", logger.DEBUG)
|
||||
except sickbeard.indexer_episodenotfound, e:
|
||||
self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
|
||||
indexer_id) + u", skipping", logger.DEBUG)
|
||||
# we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
|
||||
episodes = []
|
||||
continue
|
||||
except indexer_exceptions.indexer_error, e:
|
||||
logger.log(u"Unable to contact " + self.indexer + ": " + ex(e), logger.WARNING)
|
||||
except sickbeard.indexer_error, e:
|
||||
logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
|
||||
logger.WARNING)
|
||||
episodes = []
|
||||
continue
|
||||
|
||||
# if there's no season then we can hopefully just use 1 automatically
|
||||
elif season == None and indexer_id:
|
||||
myDB = db.DBConnection()
|
||||
numseasonsSQlResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [indexer_id])
|
||||
numseasonsSQlResult = myDB.select(
|
||||
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
|
||||
[indexer_id])
|
||||
if int(numseasonsSQlResult[0][0]) == 1 and season == None:
|
||||
self._log(u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...", logger.DEBUG)
|
||||
self._log(
|
||||
u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...",
|
||||
logger.DEBUG)
|
||||
season = 1
|
||||
|
||||
if indexer_id and season != None and episodes:
|
||||
|
@ -667,11 +686,12 @@ class PostProcessor(object):
|
|||
try:
|
||||
show_obj = helpers.findCertainShow(sickbeard.showList, indexer_id)
|
||||
except exceptions.MultipleShowObjectsException:
|
||||
raise #TODO: later I'll just log this, for now I want to know about it ASAP
|
||||
raise #TODO: later I'll just log this, for now I want to know about it ASAP
|
||||
|
||||
# if we can't find the show then there's nothing we can really do
|
||||
if not show_obj:
|
||||
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode", logger.ERROR)
|
||||
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
|
||||
logger.ERROR)
|
||||
raise exceptions.PostProcessingFailed()
|
||||
|
||||
root_ep = None
|
||||
|
@ -687,7 +707,7 @@ class PostProcessor(object):
|
|||
except exceptions.EpisodeNotFoundException, e:
|
||||
self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
|
||||
raise exceptions.PostProcessingFailed()
|
||||
|
||||
|
||||
# associate all the episodes together under a single root episode
|
||||
if root_ep == None:
|
||||
root_ep = curEp
|
||||
|
@ -711,9 +731,11 @@ class PostProcessor(object):
|
|||
|
||||
# if there is a quality available in the status then we don't need to bother guessing from the filename
|
||||
if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
|
||||
oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) #@UnusedVariable
|
||||
oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) #@UnusedVariable
|
||||
if ep_quality != common.Quality.UNKNOWN:
|
||||
self._log(u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
|
||||
self._log(
|
||||
u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
|
||||
logger.DEBUG)
|
||||
return ep_quality
|
||||
|
||||
# nzb name is the most reliable if it exists, followed by folder name and lastly file name
|
||||
|
@ -727,18 +749,24 @@ class PostProcessor(object):
|
|||
continue
|
||||
|
||||
ep_quality = common.Quality.nameQuality(cur_name)
|
||||
self._log(u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
|
||||
self._log(
|
||||
u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality],
|
||||
logger.DEBUG)
|
||||
|
||||
# if we find a good one then use it
|
||||
if ep_quality != common.Quality.UNKNOWN:
|
||||
logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
|
||||
logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[
|
||||
ep_quality] + ", using that", logger.DEBUG)
|
||||
return ep_quality
|
||||
|
||||
# if we didn't get a quality from one of the names above, try assuming from each of the names
|
||||
ep_quality = common.Quality.assumeQuality(self.file_name)
|
||||
self._log(u"Guessing quality for name " + self.file_name+u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
|
||||
self._log(
|
||||
u"Guessing quality for name " + self.file_name + u", got " + common.Quality.qualityStrings[ep_quality],
|
||||
logger.DEBUG)
|
||||
if ep_quality != common.Quality.UNKNOWN:
|
||||
logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
|
||||
logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[
|
||||
ep_quality] + ", using that", logger.DEBUG)
|
||||
return ep_quality
|
||||
|
||||
test = str(ep_quality)
|
||||
|
@ -757,13 +785,15 @@ class PostProcessor(object):
|
|||
script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
|
||||
self._log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)
|
||||
|
||||
script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season), str(ep_obj.episode), str(ep_obj.airdate)]
|
||||
script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season),
|
||||
str(ep_obj.episode), str(ep_obj.airdate)]
|
||||
|
||||
# use subprocess to run the command and capture output
|
||||
self._log(u"Executing command " + str(script_cmd))
|
||||
try:
|
||||
p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() # @UnusedVariable
|
||||
p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
|
||||
out, err = p.communicate() # @UnusedVariable
|
||||
self._log(u"Script result: " + str(out), logger.DEBUG)
|
||||
|
||||
except OSError, e:
|
||||
|
@ -795,12 +825,15 @@ class PostProcessor(object):
|
|||
|
||||
# if the user downloaded it manually and it's higher quality than the existing episode then it's priority
|
||||
if new_ep_quality > old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
|
||||
self._log(u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority", logger.DEBUG)
|
||||
self._log(
|
||||
u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority",
|
||||
logger.DEBUG)
|
||||
return True
|
||||
|
||||
# if the user downloaded it manually and it appears to be a PROPER/REPACK then it's priority
|
||||
if self.is_proper and new_ep_quality >= old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
|
||||
self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority", logger.DEBUG)
|
||||
self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority",
|
||||
logger.DEBUG)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
@ -825,7 +858,7 @@ class PostProcessor(object):
|
|||
# try to find the file info
|
||||
indexer_id = season = episodes = None
|
||||
if 'auto' in self.indexer:
|
||||
for indexer in indexerStrings:
|
||||
for indexer in sickbeard.indexerApi().indexers:
|
||||
self.indexer = indexer
|
||||
|
||||
# try to find the file info
|
||||
|
@ -833,7 +866,8 @@ class PostProcessor(object):
|
|||
if indexer_id and season != None and episodes:
|
||||
break
|
||||
|
||||
self._log(u"Can't find show on " + self.indexer + ", auto trying next indexer in list", logger.WARNING)
|
||||
self._log(u"Can't find show on " + sickbeard.indexerApi(
|
||||
self.indexer).name + ", auto trying next indexer in list", logger.WARNING)
|
||||
else:
|
||||
(indexer_id, season, episodes) = self._find_info()
|
||||
|
||||
|
@ -865,17 +899,22 @@ class PostProcessor(object):
|
|||
|
||||
# if there's an existing file that we don't want to replace stop here
|
||||
if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
|
||||
self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.ERROR)
|
||||
self._log(
|
||||
u"File exists and we are not going to replace it because it's not smaller, quitting post-processing",
|
||||
logger.ERROR)
|
||||
return False
|
||||
elif existing_file_status == PostProcessor.EXISTS_SMALLER:
|
||||
self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
|
||||
elif existing_file_status != PostProcessor.DOESNT_EXIST:
|
||||
self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
|
||||
self._log(u"Unknown existing file status. This should never happen, please log this as a bug.",
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
# if the file is priority then we're going to replace it even if it exists
|
||||
else:
|
||||
self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)
|
||||
self._log(
|
||||
u"This download is marked a priority download so I'm going to replace an existing file if I find one",
|
||||
logger.DEBUG)
|
||||
|
||||
# delete the existing file (and company)
|
||||
for cur_ep in [ep_obj] + ep_obj.relatedEps:
|
||||
|
@ -883,7 +922,8 @@ class PostProcessor(object):
|
|||
self._delete(cur_ep.location, associated_files=True)
|
||||
# clean up any left over folders
|
||||
if cur_ep.location:
|
||||
helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
|
||||
helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
|
||||
keep_dir=ep_obj.show._location)
|
||||
except (OSError, IOError):
|
||||
raise exceptions.PostProcessingFailed("Unable to delete the existing files")
|
||||
|
||||
|
@ -953,7 +993,8 @@ class PostProcessor(object):
|
|||
|
||||
dest_path = ek.ek(os.path.dirname, proper_absolute_path)
|
||||
except exceptions.ShowDirNotFoundException:
|
||||
raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")
|
||||
raise exceptions.PostProcessingFailed(
|
||||
u"Unable to post-process an episode if the show dir doesn't exist, quitting")
|
||||
|
||||
self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)
|
||||
|
||||
|
@ -974,16 +1015,20 @@ class PostProcessor(object):
|
|||
try:
|
||||
# move the episode and associated files to the show dir
|
||||
if self.process_method == "copy":
|
||||
self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
|
||||
sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
elif self.process_method == "move":
|
||||
self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
|
||||
sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
elif self.process_method == "hardlink":
|
||||
self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
|
||||
sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
elif self.process_method == "symlink":
|
||||
self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
|
||||
sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
|
||||
else:
|
||||
logger.log(u"Unknown process method: " + sickbeard.PROCESS_METHOD, logger.ERROR)
|
||||
raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
|
||||
logger.log(u"Unknown process method: " + sickbeard.PROCESS_METHOD, logger.ERROR)
|
||||
raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
|
||||
except (OSError, IOError):
|
||||
raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
|
||||
|
||||
|
|
|
@ -36,11 +36,14 @@ from sickbeard import failedProcessor
|
|||
from lib.unrar2 import RarFile, RarInfo
|
||||
from lib.unrar2.rar_exceptions import *
|
||||
|
||||
def logHelper (logMessage, logLevel=logger.MESSAGE):
|
||||
|
||||
def logHelper(logMessage, logLevel=logger.MESSAGE):
|
||||
logger.log(logMessage, logLevel)
|
||||
return logMessage + u"\n"
|
||||
|
||||
def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto", indexer="auto"):
|
||||
|
||||
def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto",
|
||||
indexer="auto"):
|
||||
"""
|
||||
Scans through the files in dirName and processes whatever media files it finds
|
||||
|
||||
|
@ -72,7 +75,9 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
|
|||
|
||||
# if we didn't find a real dir then quit
|
||||
if not ek.ek(os.path.isdir, dirName):
|
||||
returnStr += logHelper(u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG)
|
||||
returnStr += logHelper(
|
||||
u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.",
|
||||
logger.DEBUG)
|
||||
return returnStr
|
||||
|
||||
path, dirs, files = get_path_dir_files(dirName, nzbName, type)
|
||||
|
@ -128,26 +133,28 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
|
|||
#Don't Link media when the media is extracted from a rar in the same path
|
||||
if process_method in ('hardlink', 'symlink') and videoInRar:
|
||||
process_media(processPath, videoInRar, nzbName, 'move', force, is_priority, indexer)
|
||||
process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, is_priority, indexer)
|
||||
process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
|
||||
is_priority, indexer)
|
||||
delete_files(processPath, rarContent)
|
||||
else:
|
||||
process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer)
|
||||
|
||||
#Delete all file not needed
|
||||
if process_method != "move" or not process_result \
|
||||
or type=="manual": #Avoid to delete files if is Manual PostProcessing
|
||||
or type == "manual": #Avoid to delete files if is Manual PostProcessing
|
||||
continue
|
||||
|
||||
delete_files(processPath, notwantedFiles)
|
||||
|
||||
if process_method == "move" and \
|
||||
ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
|
||||
ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath,
|
||||
sickbeard.TV_DOWNLOAD_DIR):
|
||||
delete_dir(processPath)
|
||||
|
||||
return returnStr
|
||||
|
||||
def validateDir(path, dirName, nzbNameOriginal, failed):
|
||||
|
||||
def validateDir(path, dirName, nzbNameOriginal, failed):
|
||||
global process_result, returnStr
|
||||
|
||||
returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)
|
||||
|
@ -156,10 +163,12 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
|
|||
returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
|
||||
failed = True
|
||||
elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
|
||||
returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
|
||||
returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.",
|
||||
logger.DEBUG)
|
||||
failed = True
|
||||
elif ek.ek(os.path.basename, dirName).startswith('_UNPACK_'):
|
||||
returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
|
||||
returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.",
|
||||
logger.DEBUG)
|
||||
|
||||
if failed:
|
||||
process_failed(os.path.join(path, dirName), nzbNameOriginal)
|
||||
|
@ -169,8 +178,12 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
|
|||
myDB = db.DBConnection()
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows")
|
||||
for sqlShow in sqlResults:
|
||||
if dirName.lower().startswith(ek.ek(os.path.realpath, sqlShow["location"]).lower()+os.sep) or dirName.lower() == ek.ek(os.path.realpath, sqlShow["location"]).lower():
|
||||
returnStr += logHelper(u"You're trying to post process an episode that's already been moved to its show dir, skipping", logger.ERROR)
|
||||
if dirName.lower().startswith(
|
||||
ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(
|
||||
os.path.realpath, sqlShow["location"]).lower():
|
||||
returnStr += logHelper(
|
||||
u"You're trying to post process an episode that's already been moved to its show dir, skipping",
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
# Get the videofile list for the next checks
|
||||
|
@ -211,8 +224,8 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
|
|||
|
||||
return False
|
||||
|
||||
def unRAR(path, rarFiles, force):
|
||||
|
||||
def unRAR(path, rarFiles, force):
|
||||
global process_result, returnStr
|
||||
|
||||
unpacked_files = []
|
||||
|
@ -232,27 +245,29 @@ def unRAR(path, rarFiles, force):
|
|||
skip_file = False
|
||||
for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]:
|
||||
if already_postprocessed(path, file_in_archive, force):
|
||||
returnStr += logHelper(u"Archive file already post-processed, extraction skipped: " + file_in_archive, logger.DEBUG)
|
||||
returnStr += logHelper(
|
||||
u"Archive file already post-processed, extraction skipped: " + file_in_archive,
|
||||
logger.DEBUG)
|
||||
skip_file = True
|
||||
break
|
||||
|
||||
if skip_file:
|
||||
continue
|
||||
|
||||
rar_handle.extract(path = path, withSubpath = False, overwrite = False)
|
||||
rar_handle.extract(path=path, withSubpath=False, overwrite=False)
|
||||
unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
|
||||
del rar_handle
|
||||
except Exception, e:
|
||||
returnStr += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR)
|
||||
process_result = False
|
||||
continue
|
||||
returnStr += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR)
|
||||
process_result = False
|
||||
continue
|
||||
|
||||
returnStr += logHelper(u"UnRar content: " + str(unpacked_files), logger.DEBUG)
|
||||
|
||||
return unpacked_files
|
||||
|
||||
def already_postprocessed(dirName, videofile, force):
|
||||
|
||||
def already_postprocessed(dirName, videofile, force):
|
||||
global returnStr
|
||||
|
||||
if force:
|
||||
|
@ -266,7 +281,8 @@ def already_postprocessed(dirName, videofile, force):
|
|||
myDB = db.DBConnection()
|
||||
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
|
||||
if sqlResult:
|
||||
returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping", logger.DEBUG)
|
||||
returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
|
||||
logger.DEBUG)
|
||||
return True
|
||||
|
||||
# This is needed for video whose name differ from dirName
|
||||
|
@ -275,7 +291,8 @@ def already_postprocessed(dirName, videofile, force):
|
|||
|
||||
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
|
||||
if sqlResult:
|
||||
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
|
||||
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
|
||||
logger.DEBUG)
|
||||
return True
|
||||
|
||||
#Needed if we have downloaded the same episode @ different quality
|
||||
|
@ -285,13 +302,14 @@ def already_postprocessed(dirName, videofile, force):
|
|||
search_sql += " and history.resource LIKE ?"
|
||||
sqlResult = myDB.select(search_sql, [u'%' + videofile])
|
||||
if sqlResult:
|
||||
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
|
||||
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
|
||||
logger.DEBUG)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer):
|
||||
|
||||
def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer):
|
||||
global process_result, returnStr
|
||||
|
||||
for cur_video_file in videoFiles:
|
||||
|
@ -314,14 +332,15 @@ def process_media(processPath, videoFiles, nzbName, process_method, force, is_pr
|
|||
if process_result:
|
||||
returnStr += logHelper(u"Processing succeeded for " + cur_video_file_path)
|
||||
else:
|
||||
returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message, logger.WARNING)
|
||||
returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message,
|
||||
logger.WARNING)
|
||||
|
||||
#If something fail abort the processing on dir
|
||||
if not process_result:
|
||||
break
|
||||
|
||||
def delete_files(processPath, notwantedFiles):
|
||||
|
||||
def delete_files(processPath, notwantedFiles):
|
||||
global returnStr, process_result
|
||||
|
||||
if not process_result:
|
||||
|
@ -333,30 +352,32 @@ def delete_files(processPath, notwantedFiles):
|
|||
cur_file_path = ek.ek(os.path.join, processPath, cur_file)
|
||||
|
||||
if not ek.ek(os.path.isfile, cur_file_path):
|
||||
continue #Prevent error when a notwantedfiles is an associated files
|
||||
continue #Prevent error when a notwantedfiles is an associated files
|
||||
|
||||
returnStr += logHelper(u"Deleting file " + cur_file, logger.DEBUG)
|
||||
|
||||
#check first the read-only attribute
|
||||
#check first the read-only attribute
|
||||
file_attribute = ek.ek(os.stat, cur_file_path)[0]
|
||||
if (not file_attribute & stat.S_IWRITE):
|
||||
# File is read-only, so make it writeable
|
||||
returnStr += logHelper(u"Changing ReadOnly Flag for file " + cur_file, logger.DEBUG)
|
||||
try:
|
||||
ek.ek(os.chmod,cur_file_path,stat.S_IWRITE)
|
||||
ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
|
||||
except OSError, e:
|
||||
returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + e.strerror, logger.DEBUG)
|
||||
returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + e.strerror,
|
||||
logger.DEBUG)
|
||||
try:
|
||||
ek.ek(os.remove, cur_file_path)
|
||||
except OSError, e:
|
||||
returnStr += logHelper(u"Unable to delete file " + cur_file + ': ' + e.strerror, logger.DEBUG)
|
||||
|
||||
def delete_dir(processPath):
|
||||
|
||||
def delete_dir(processPath):
|
||||
global returnStr
|
||||
|
||||
if not ek.ek(os.listdir, processPath) == []:
|
||||
returnStr += logHelper(u"Skipping Deleting folder " + processPath + ' because some files was not deleted/processed', logger.DEBUG)
|
||||
returnStr += logHelper(
|
||||
u"Skipping Deleting folder " + processPath + ' because some files was not deleted/processed', logger.DEBUG)
|
||||
return
|
||||
|
||||
returnStr += logHelper(u"Deleting folder " + processPath, logger.DEBUG)
|
||||
|
@ -366,15 +387,16 @@ def delete_dir(processPath):
|
|||
except (OSError, IOError), e:
|
||||
returnStr += logHelper(u"Warning: unable to remove the folder " + processPath + ": " + ex(e), logger.WARNING)
|
||||
|
||||
def get_path_dir_files(dirName, nzbName, type):
|
||||
|
||||
if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type =="manual": #Scheduled Post Processing Active
|
||||
def get_path_dir_files(dirName, nzbName, type):
|
||||
if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type == "manual": #Scheduled Post Processing Active
|
||||
#Get at first all the subdir in the dirName
|
||||
for path, dirs, files in ek.ek(os.walk, dirName):
|
||||
break
|
||||
else:
|
||||
path, dirs = ek.ek(os.path.split, dirName) #Script Post Processing
|
||||
if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(os.path.join(dirName, nzbName)): #For single torrent file without Dir
|
||||
path, dirs = ek.ek(os.path.split, dirName) #Script Post Processing
|
||||
if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(
|
||||
os.path.join(dirName, nzbName)): #For single torrent file without Dir
|
||||
dirs = []
|
||||
files = [os.path.join(dirName, nzbName)]
|
||||
else:
|
||||
|
@ -383,6 +405,7 @@ def get_path_dir_files(dirName, nzbName, type):
|
|||
|
||||
return path, dirs, files
|
||||
|
||||
|
||||
def process_failed(dirName, nzbName):
|
||||
"""Process a download that did not complete correctly"""
|
||||
|
||||
|
@ -405,4 +428,6 @@ def process_failed(dirName, nzbName):
|
|||
if process_result:
|
||||
returnStr += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
|
||||
else:
|
||||
returnStr += logHelper(u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message, logger.WARNING)
|
||||
returnStr += logHelper(
|
||||
u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
|
||||
logger.WARNING)
|
||||
|
|
|
@ -31,14 +31,10 @@ from sickbeard import history
|
|||
|
||||
from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality
|
||||
|
||||
|
||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
||||
|
||||
from name_parser.parser import NameParser, InvalidNameException
|
||||
|
||||
|
||||
class ProperFinder():
|
||||
|
||||
def __init__(self):
|
||||
self.updateInterval = datetime.timedelta(hours=1)
|
||||
|
||||
|
@ -56,7 +52,7 @@ class ProperFinder():
|
|||
dayDiff = (datetime.date.today() - self._get_lastProperSearch()).days
|
||||
|
||||
# if it's less than an interval after the update time then do an update
|
||||
if hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600 or dayDiff >=1:
|
||||
if hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600 or dayDiff >= 1:
|
||||
logger.log(u"Beginning the search for new propers")
|
||||
else:
|
||||
return
|
||||
|
@ -64,7 +60,7 @@ class ProperFinder():
|
|||
propers = self._getProperList()
|
||||
|
||||
self._downloadPropers(propers)
|
||||
|
||||
|
||||
self._set_lastProperSearch(datetime.datetime.today().toordinal())
|
||||
|
||||
def _getProperList(self):
|
||||
|
@ -110,7 +106,9 @@ class ProperFinder():
|
|||
continue
|
||||
|
||||
if not parse_result.episode_numbers:
|
||||
logger.log(u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
|
||||
logger.log(
|
||||
u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
# populate our Proper instance
|
||||
|
@ -138,7 +136,9 @@ class ProperFinder():
|
|||
|
||||
# if it matches
|
||||
if genericName == self._genericName(curSceneName):
|
||||
logger.log(u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG)
|
||||
logger.log(
|
||||
u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name,
|
||||
logger.DEBUG)
|
||||
|
||||
# set the indexerid in the db to the show's indexerid
|
||||
curProper.indexerid = curShow.indexerid
|
||||
|
@ -157,7 +157,8 @@ class ProperFinder():
|
|||
continue
|
||||
|
||||
if not show_name_helpers.filterBadReleases(curProper.name):
|
||||
logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG)
|
||||
logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
# if we have an air-by-date show then get the real season/episode numbers
|
||||
|
@ -175,18 +176,21 @@ class ProperFinder():
|
|||
lINDEXER_API_PARMS['language'] = indexer_lang
|
||||
|
||||
try:
|
||||
t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
|
||||
t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
|
||||
|
||||
epObj = t[curProper.indexerid].airedOn(curProper.episode)[0]
|
||||
|
||||
curProper.season = int(epObj["seasonnumber"])
|
||||
curProper.episodes = [int(epObj["episodenumber"])]
|
||||
except indexer_exceptions.indexer_episodenotfound:
|
||||
logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
|
||||
except sickbeard.indexer_episodenotfound:
|
||||
logger.log(u"Unable to find episode with date " + str(
|
||||
curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
|
||||
continue
|
||||
|
||||
# check if we actually want this proper (if it's the right quality)
|
||||
sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.indexerid, curProper.season, curProper.episode])
|
||||
sqlResults = db.DBConnection().select(
|
||||
"SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
|
||||
[curProper.indexerid, curProper.season, curProper.episode])
|
||||
if not sqlResults:
|
||||
continue
|
||||
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
|
||||
|
@ -196,7 +200,8 @@ class ProperFinder():
|
|||
continue
|
||||
|
||||
# if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
|
||||
if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
|
||||
if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
|
||||
operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
|
||||
logger.log(u"Found a proper that we need: " + str(curProper.name))
|
||||
finalPropers.append(curProper)
|
||||
|
||||
|
@ -214,11 +219,13 @@ class ProperFinder():
|
|||
"SELECT resource FROM history "
|
||||
"WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "
|
||||
"AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")",
|
||||
[curProper.indexerid, curProper.season, curProper.episode, curProper.quality, historyLimit.strftime(history.dateFormat)])
|
||||
[curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
|
||||
historyLimit.strftime(history.dateFormat)])
|
||||
|
||||
# if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
|
||||
if len(historyResults) == 0:
|
||||
logger.log(u"Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.")
|
||||
logger.log(
|
||||
u"Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.")
|
||||
continue
|
||||
|
||||
else:
|
||||
|
@ -237,7 +244,8 @@ class ProperFinder():
|
|||
# get the episode object
|
||||
showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
|
||||
if showObj == None:
|
||||
logger.log(u"Unable to find the show with indexerid " + str(curProper.indexerid) + " so unable to download the proper", logger.ERROR)
|
||||
logger.log(u"Unable to find the show with indexerid " + str(
|
||||
curProper.indexerid) + " so unable to download the proper", logger.ERROR)
|
||||
continue
|
||||
epObj = showObj.getEpisode(curProper.season, curProper.episode)
|
||||
|
||||
|
@ -263,7 +271,8 @@ class ProperFinder():
|
|||
sqlResults = myDB.select("SELECT * FROM info")
|
||||
|
||||
if len(sqlResults) == 0:
|
||||
myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)", [0, 0, str(when)])
|
||||
myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)",
|
||||
[0, 0, str(when)])
|
||||
else:
|
||||
myDB.action("UPDATE info SET last_proper_search=" + str(when))
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ __all__ = ['ezrss',
|
|||
'btn',
|
||||
'thepiratebay',
|
||||
'kat',
|
||||
'publichd',
|
||||
'publichd',
|
||||
'torrentleech',
|
||||
'scc',
|
||||
'hdtorrents',
|
||||
|
@ -30,16 +30,16 @@ __all__ = ['ezrss',
|
|||
'hdbits',
|
||||
'iptorrents',
|
||||
'omgwtfnzbs',
|
||||
'nextgen'
|
||||
]
|
||||
'nextgen'
|
||||
]
|
||||
|
||||
import sickbeard
|
||||
from sickbeard import logger
|
||||
|
||||
from os import sys
|
||||
|
||||
def sortedProviderList():
|
||||
|
||||
def sortedProviderList():
|
||||
initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList
|
||||
providerDict = dict(zip([x.getID() for x in initialList], initialList))
|
||||
|
||||
|
@ -57,12 +57,12 @@ def sortedProviderList():
|
|||
|
||||
return newList
|
||||
|
||||
def makeProviderList():
|
||||
|
||||
def makeProviderList():
|
||||
return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
|
||||
|
||||
def getNewznabProviderList(data):
|
||||
|
||||
def getNewznabProviderList(data):
|
||||
defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
|
||||
providerList = filter(lambda x: x, [makeNewznabProvider(x) for x in data.split('!!!')])
|
||||
|
||||
|
@ -80,12 +80,11 @@ def getNewznabProviderList(data):
|
|||
providerDict[curDefault.name].name = curDefault.name
|
||||
providerDict[curDefault.name].url = curDefault.url
|
||||
providerDict[curDefault.name].needs_auth = curDefault.needs_auth
|
||||
|
||||
|
||||
return filter(lambda x: x, providerList)
|
||||
|
||||
|
||||
def makeNewznabProvider(configString):
|
||||
|
||||
if not configString:
|
||||
return None
|
||||
|
||||
|
@ -102,12 +101,13 @@ def makeNewznabProvider(configString):
|
|||
|
||||
return newProvider
|
||||
|
||||
|
||||
def getTorrentRssProviderList(data):
|
||||
providerList = filter(lambda x: x, [makeTorrentRssProvider(x) for x in data.split('!!!')])
|
||||
return filter(lambda x: x, providerList)
|
||||
|
||||
def makeTorrentRssProvider(configString):
|
||||
|
||||
def makeTorrentRssProvider(configString):
|
||||
if not configString:
|
||||
return None
|
||||
|
||||
|
@ -120,20 +120,24 @@ def makeTorrentRssProvider(configString):
|
|||
|
||||
return newProvider
|
||||
|
||||
|
||||
def getDefaultNewznabProviders():
|
||||
return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040,5060|0!!!NZBs.org|http://nzbs.org/||5030,5040,5060,5070,5090|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0'
|
||||
|
||||
|
||||
def getProviderModule(name):
|
||||
name = name.lower()
|
||||
prefix = "sickbeard.providers."
|
||||
if name in __all__ and prefix+name in sys.modules:
|
||||
return sys.modules[prefix+name]
|
||||
if name in __all__ and prefix + name in sys.modules:
|
||||
return sys.modules[prefix + name]
|
||||
else:
|
||||
raise Exception("Can't find " + prefix+name + " in " + "Providers")
|
||||
raise Exception("Can't find " + prefix + name + " in " + "Providers")
|
||||
|
||||
|
||||
def getProviderClass(id):
|
||||
|
||||
providerMatch = [x for x in sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if x.getID() == id]
|
||||
providerMatch = [x for x in
|
||||
sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if
|
||||
x.getID() == id]
|
||||
|
||||
if len(providerMatch) != 1:
|
||||
return None
|
||||
|
|
|
@ -36,7 +36,6 @@ import math
|
|||
|
||||
|
||||
class BTNProvider(generic.TorrentProvider):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
generic.TorrentProvider.__init__(self, "BTN")
|
||||
|
@ -64,8 +63,10 @@ class BTNProvider(generic.TorrentProvider):
|
|||
return self._checkAuth()
|
||||
|
||||
if 'api-error' in parsedJSON:
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['api-error'], logger.DEBUG)
|
||||
raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.")
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['api-error'],
|
||||
logger.DEBUG)
|
||||
raise AuthException(
|
||||
"Your authentication credentials for " + self.name + " are incorrect, check your config.")
|
||||
|
||||
return True
|
||||
|
||||
|
@ -150,7 +151,7 @@ class BTNProvider(generic.TorrentProvider):
|
|||
|
||||
except Exception, error:
|
||||
errorstring = str(error)
|
||||
if(errorstring.startswith('<') and errorstring.endswith('>')):
|
||||
if (errorstring.startswith('<') and errorstring.endswith('>')):
|
||||
errorstring = errorstring[1:-1]
|
||||
logger.log(u"Unknown error while accessing " + self.name + ": " + errorstring, logger.ERROR)
|
||||
|
||||
|
@ -296,7 +297,6 @@ class BTNProvider(generic.TorrentProvider):
|
|||
|
||||
|
||||
class BTNCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
||||
|
@ -328,13 +328,14 @@ class BTNCache(tvcache.TVCache):
|
|||
ci = self._parseItem(item)
|
||||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
|
||||
if len(cl) > 0:
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
else:
|
||||
raise AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")
|
||||
raise AuthException(
|
||||
"Your authentication info for " + self.provider.name + " is incorrect, check your config")
|
||||
|
||||
else:
|
||||
return []
|
||||
|
@ -350,7 +351,9 @@ class BTNCache(tvcache.TVCache):
|
|||
|
||||
# Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, older things will need to be done through backlog
|
||||
if seconds_since_last_update > 86400:
|
||||
logger.log(u"The last known successful update on " + self.provider.name + " was more than 24 hours ago, only trying to fetch the last 24 hours!", logger.WARNING)
|
||||
logger.log(
|
||||
u"The last known successful update on " + self.provider.name + " was more than 24 hours ago, only trying to fetch the last 24 hours!",
|
||||
logger.WARNING)
|
||||
seconds_since_last_update = 86400
|
||||
|
||||
data = self.provider._doSearch(search_params=None, age=seconds_since_last_update)
|
||||
|
@ -364,10 +367,12 @@ class BTNCache(tvcache.TVCache):
|
|||
logger.log(u"Adding item to results: " + title, logger.DEBUG)
|
||||
return self._addCacheEntry(title, url)
|
||||
else:
|
||||
logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", logger.ERROR)
|
||||
logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
|
||||
logger.ERROR)
|
||||
return None
|
||||
|
||||
def _checkAuth(self, data):
|
||||
return self.provider._checkAuthFromData(data)
|
||||
|
||||
|
||||
provider = BTNProvider()
|
||||
|
|
|
@ -31,8 +31,8 @@ from sickbeard.helpers import sanitizeSceneName, get_xml_text
|
|||
from sickbeard import show_name_helpers
|
||||
from sickbeard.exceptions import ex
|
||||
|
||||
class DTTProvider(generic.TorrentProvider):
|
||||
|
||||
class DTTProvider(generic.TorrentProvider):
|
||||
def __init__(self):
|
||||
generic.TorrentProvider.__init__(self, "DailyTvTorrents")
|
||||
self.supportsBacklog = True
|
||||
|
@ -41,45 +41,45 @@ class DTTProvider(generic.TorrentProvider):
|
|||
|
||||
def isEnabled(self):
|
||||
return sickbeard.DTT
|
||||
|
||||
|
||||
def imageName(self):
|
||||
return 'dailytvtorrents.gif'
|
||||
|
||||
|
||||
def getQuality(self, item):
|
||||
url = item.getElementsByTagName('enclosure')[0].getAttribute('url')
|
||||
quality = Quality.sceneQuality(url)
|
||||
return quality
|
||||
|
||||
def findSeasonResults(self, show, season):
|
||||
|
||||
|
||||
return generic.TorrentProvider.findSeasonResults(self, show, season)
|
||||
|
||||
|
||||
def _dtt_show_id(self, show_name):
|
||||
return sanitizeSceneName(show_name).replace('.','-').lower()
|
||||
return sanitizeSceneName(show_name).replace('.', '-').lower()
|
||||
|
||||
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
|
||||
search_string = []
|
||||
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
|
||||
show_string = sanitizeSceneName(show_name).replace('.','-').lower()
|
||||
show_string = sanitizeSceneName(show_name).replace('.', '-').lower()
|
||||
search_string.append(show_string)
|
||||
|
||||
return search_string
|
||||
|
||||
|
||||
def _get_episode_search_strings(self, episode):
|
||||
return self._get_season_search_strings(episode.show, episode.season)
|
||||
|
||||
def _doSearch(self, search_params, show=None):
|
||||
|
||||
# show_id = self._dtt_show_id(show.name)
|
||||
|
||||
params = {"items" : "all"}
|
||||
def _doSearch(self, search_params, show=None):
|
||||
|
||||
# show_id = self._dtt_show_id(show.name)
|
||||
|
||||
params = {"items": "all"}
|
||||
|
||||
if sickbeard.DTT_NORAR:
|
||||
params.update({"norar" : "yes"})
|
||||
params.update({"norar": "yes"})
|
||||
|
||||
if sickbeard.DTT_SINGLE:
|
||||
params.update({"single" : "yes"})
|
||||
params.update({"single": "yes"})
|
||||
|
||||
searchURL = self.url + "rss/show/" + search_params + "?" + urllib.urlencode(params)
|
||||
|
||||
|
@ -89,13 +89,13 @@ class DTTProvider(generic.TorrentProvider):
|
|||
|
||||
if not data:
|
||||
return []
|
||||
|
||||
|
||||
try:
|
||||
parsedXML = parseString(data)
|
||||
items = parsedXML.getElementsByTagName('item')
|
||||
except Exception, e:
|
||||
logger.log(u"Error trying to load DTT RSS feed: "+ex(e), logger.ERROR)
|
||||
logger.log(u"RSS data: "+data, logger.DEBUG)
|
||||
logger.log(u"Error trying to load DTT RSS feed: " + ex(e), logger.ERROR)
|
||||
logger.log(u"RSS data: " + data, logger.DEBUG)
|
||||
return []
|
||||
|
||||
results = []
|
||||
|
@ -114,8 +114,8 @@ class DTTProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
class DTTCache(tvcache.TVCache):
|
||||
|
||||
class DTTCache(tvcache.TVCache):
|
||||
def __init__(self, provider):
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
||||
|
@ -123,23 +123,24 @@ class DTTCache(tvcache.TVCache):
|
|||
self.minTime = 30
|
||||
|
||||
def _getRSSData(self):
|
||||
|
||||
params = {"items" : "all"}
|
||||
|
||||
params = {"items": "all"}
|
||||
|
||||
if sickbeard.DTT_NORAR:
|
||||
params.update({"norar" : "yes"})
|
||||
params.update({"norar": "yes"})
|
||||
|
||||
if sickbeard.DTT_SINGLE:
|
||||
params.update({"single" : "yes"})
|
||||
params.update({"single": "yes"})
|
||||
|
||||
url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params)
|
||||
logger.log(u"DTT cache update URL: "+ url, logger.DEBUG)
|
||||
logger.log(u"DTT cache update URL: " + url, logger.DEBUG)
|
||||
data = self.provider.getURL(url)
|
||||
return data
|
||||
|
||||
def _parseItem(self, item):
|
||||
title, url = self.provider._get_title_and_url(item)
|
||||
logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG)
|
||||
logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
|
||||
return self._addCacheEntry(title, url)
|
||||
|
||||
|
||||
provider = DTTProvider()
|
|
@ -18,6 +18,7 @@
|
|||
|
||||
import urllib
|
||||
import re
|
||||
|
||||
try:
|
||||
import xml.etree.cElementTree as etree
|
||||
except ImportError:
|
||||
|
@ -33,7 +34,6 @@ from sickbeard import helpers
|
|||
|
||||
|
||||
class EZRSSProvider(generic.TorrentProvider):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
generic.TorrentProvider.__init__(self, "EZRSS")
|
||||
|
@ -52,7 +52,8 @@ class EZRSSProvider(generic.TorrentProvider):
|
|||
|
||||
def getQuality(self, item):
|
||||
|
||||
filename = helpers.get_xml_text(item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
||||
filename = helpers.get_xml_text(
|
||||
item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
||||
quality = Quality.nameQuality(filename)
|
||||
|
||||
return quality
|
||||
|
@ -62,7 +63,8 @@ class EZRSSProvider(generic.TorrentProvider):
|
|||
results = {}
|
||||
|
||||
if show.air_by_date:
|
||||
logger.log(self.name + u" doesn't support air-by-date backlog because of limitations on their RSS search.", logger.WARNING)
|
||||
logger.log(self.name + u" doesn't support air-by-date backlog because of limitations on their RSS search.",
|
||||
logger.WARNING)
|
||||
return results
|
||||
|
||||
results = generic.TorrentProvider.findSeasonResults(self, show, season)
|
||||
|
@ -134,14 +136,17 @@ class EZRSSProvider(generic.TorrentProvider):
|
|||
logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG)
|
||||
results.append(curItem)
|
||||
else:
|
||||
logger.log(u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", logger.ERROR)
|
||||
logger.log(
|
||||
u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable",
|
||||
logger.ERROR)
|
||||
|
||||
return results
|
||||
|
||||
def _get_title_and_url(self, item):
|
||||
(title, url) = generic.TorrentProvider._get_title_and_url(self, item)
|
||||
|
||||
filename = helpers.get_xml_text(item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
||||
filename = helpers.get_xml_text(
|
||||
item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
||||
|
||||
if filename:
|
||||
new_title = self._extract_name_from_filename(filename)
|
||||
|
@ -161,7 +166,6 @@ class EZRSSProvider(generic.TorrentProvider):
|
|||
|
||||
|
||||
class EZRSSCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -192,7 +196,10 @@ class EZRSSCache(tvcache.TVCache):
|
|||
return self._addCacheEntry(title, url)
|
||||
|
||||
else:
|
||||
logger.log(u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", logger.ERROR)
|
||||
logger.log(
|
||||
u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
|
||||
logger.ERROR)
|
||||
return None
|
||||
|
||||
|
||||
provider = EZRSSProvider()
|
||||
|
|
|
@ -31,7 +31,7 @@ import collections
|
|||
import sickbeard
|
||||
|
||||
from sickbeard import helpers, classes, logger, db
|
||||
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT#, SEED_POLICY_TIME, SEED_POLICY_RATIO
|
||||
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT #, SEED_POLICY_TIME, SEED_POLICY_RATIO
|
||||
from sickbeard import tvcache
|
||||
from sickbeard import encodingKludge as ek
|
||||
from sickbeard.exceptions import ex
|
||||
|
@ -40,8 +40,8 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException
|
|||
from sickbeard import scene_numbering
|
||||
from sickbeard.common import Quality, Overview
|
||||
|
||||
class GenericProvider:
|
||||
|
||||
class GenericProvider:
|
||||
NZB = "nzb"
|
||||
TORRENT = "torrent"
|
||||
|
||||
|
@ -61,7 +61,7 @@ class GenericProvider:
|
|||
|
||||
@staticmethod
|
||||
def makeID(name):
|
||||
return re.sub("[^\w\d_]", "_", name.strip().lower())
|
||||
return re.sub("[^\w\d_]", "_", name.strip().lower())
|
||||
|
||||
def imageName(self):
|
||||
return self.getID() + '.png'
|
||||
|
@ -94,9 +94,9 @@ class GenericProvider:
|
|||
result = classes.TorrentSearchResult(episodes)
|
||||
else:
|
||||
result = classes.SearchResult(episodes)
|
||||
|
||||
result.provider = self
|
||||
|
||||
|
||||
result.provider = self
|
||||
|
||||
return result
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None):
|
||||
|
@ -121,7 +121,7 @@ class GenericProvider:
|
|||
Save the result to disk.
|
||||
"""
|
||||
|
||||
logger.log(u"Downloading a result from " + self.name+" at " + result.url)
|
||||
logger.log(u"Downloading a result from " + self.name + " at " + result.url)
|
||||
|
||||
data = self.getURL(result.url)
|
||||
|
||||
|
@ -189,7 +189,7 @@ class GenericProvider:
|
|||
|
||||
Returns a Quality value obtained from the node's data
|
||||
"""
|
||||
(title, url) = self._get_title_and_url(item) # @UnusedVariable
|
||||
(title, url) = self._get_title_and_url(item) # @UnusedVariable
|
||||
quality = Quality.sceneQuality(title)
|
||||
return quality
|
||||
|
||||
|
@ -201,7 +201,7 @@ class GenericProvider:
|
|||
|
||||
def _get_episode_search_strings(self, ep_obj):
|
||||
return []
|
||||
|
||||
|
||||
def _get_title_and_url(self, item):
|
||||
"""
|
||||
Retrieves the title and URL data from the item XML node
|
||||
|
@ -217,9 +217,9 @@ class GenericProvider:
|
|||
url = helpers.get_xml_text(item.find('link'))
|
||||
if url:
|
||||
url = url.replace('&', '&')
|
||||
|
||||
|
||||
return (title, url)
|
||||
|
||||
|
||||
def findEpisode(self, episode, manualSearch=False):
|
||||
|
||||
self._checkAuth()
|
||||
|
@ -229,7 +229,7 @@ class GenericProvider:
|
|||
sceneEpisode.convertToSceneNumbering()
|
||||
|
||||
logger.log(u'Searching "%s" for "%s" as "%s"'
|
||||
% (self.name, episode.prettyName() , sceneEpisode.prettyName()))
|
||||
% (self.name, episode.prettyName(), sceneEpisode.prettyName()))
|
||||
|
||||
self.cache.updateCache()
|
||||
results = self.cache.searchCache(episode, manualSearch)
|
||||
|
@ -261,16 +261,20 @@ class GenericProvider:
|
|||
|
||||
if episode.show.air_by_date:
|
||||
if parse_result.air_date != episode.airdate:
|
||||
logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
|
||||
logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
|
||||
logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
|
||||
logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(
|
||||
episode.episode) + ", skipping it", logger.DEBUG)
|
||||
continue
|
||||
|
||||
quality = self.getQuality(item)
|
||||
|
||||
if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
|
||||
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
|
||||
logger.log(
|
||||
u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
|
||||
quality], logger.DEBUG)
|
||||
continue
|
||||
|
||||
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
|
||||
|
@ -280,8 +284,8 @@ class GenericProvider:
|
|||
result.name = title
|
||||
result.quality = quality
|
||||
result.provider = self
|
||||
result.content = None
|
||||
|
||||
result.content = None
|
||||
|
||||
results.append(result)
|
||||
|
||||
return results
|
||||
|
@ -298,12 +302,12 @@ class GenericProvider:
|
|||
seasonEp = show.getAllEpisodes(season)
|
||||
wantedEp = [x for x in seasonEp if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)]
|
||||
map(lambda x: x.convertToSceneNumbering(), wantedEp)
|
||||
for x in wantedEp: sceneSeasons.setdefault(x.season,[]).append(x)
|
||||
for x in wantedEp: sceneSeasons.setdefault(x.season, []).append(x)
|
||||
|
||||
if wantedEp == seasonEp and not show.air_by_date:
|
||||
searchSeason = True
|
||||
|
||||
for sceneSeason,sceneEpisodes in sceneSeasons.iteritems():
|
||||
for sceneSeason, sceneEpisodes in sceneSeasons.iteritems():
|
||||
for curString in self._get_season_search_strings(show, str(sceneSeason), sceneEpisodes, searchSeason):
|
||||
itemList += self._doSearch(curString)
|
||||
|
||||
|
@ -323,8 +327,10 @@ class GenericProvider:
|
|||
|
||||
if not show.air_by_date:
|
||||
# this check is meaningless for non-season searches
|
||||
if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
|
||||
logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(season) + ", ignoring", logger.DEBUG)
|
||||
if (parse_result.season_number != None and parse_result.season_number != season) or (
|
||||
parse_result.season_number == None and season != 1):
|
||||
logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(
|
||||
season) + ", ignoring", logger.DEBUG)
|
||||
continue
|
||||
|
||||
# we just use the existing info for normal searches
|
||||
|
@ -333,14 +339,19 @@ class GenericProvider:
|
|||
|
||||
else:
|
||||
if not parse_result.air_by_date:
|
||||
logger.log(u"This is supposed to be an air-by-date search but the result "+title+" didn't parse as one, skipping it", logger.DEBUG)
|
||||
logger.log(
|
||||
u"This is supposed to be an air-by-date search but the result " + title + " didn't parse as one, skipping it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
myDB = db.DBConnection()
|
||||
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show.indexerid, parse_result.air_date.toordinal()])
|
||||
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
|
||||
[show.indexerid, parse_result.air_date.toordinal()])
|
||||
|
||||
if len(sql_results) != 1:
|
||||
logger.log(u"Tried to look up the date for the episode "+title+" but the database didn't give proper results, skipping it", logger.WARNING)
|
||||
logger.log(
|
||||
u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
actual_season = int(sql_results[0]["season"])
|
||||
|
@ -354,7 +365,9 @@ class GenericProvider:
|
|||
break
|
||||
|
||||
if not wantEp:
|
||||
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
|
||||
logger.log(
|
||||
u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
|
||||
quality], logger.DEBUG)
|
||||
continue
|
||||
|
||||
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
|
||||
|
@ -375,7 +388,8 @@ class GenericProvider:
|
|||
epNum = epObj[0].episode
|
||||
elif len(epObj) > 1:
|
||||
epNum = MULTI_EP_RESULT
|
||||
logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers), logger.DEBUG)
|
||||
logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
|
||||
parse_result.episode_numbers), logger.DEBUG)
|
||||
elif len(epObj) == 0:
|
||||
epNum = SEASON_RESULT
|
||||
result.extraInfo = [show]
|
||||
|
@ -396,26 +410,23 @@ class GenericProvider:
|
|||
|
||||
|
||||
class NZBProvider(GenericProvider):
|
||||
|
||||
def __init__(self, name):
|
||||
|
||||
GenericProvider.__init__(self, name)
|
||||
|
||||
self.providerType = GenericProvider.NZB
|
||||
|
||||
|
||||
class TorrentProvider(GenericProvider):
|
||||
|
||||
def __init__(self, name):
|
||||
|
||||
GenericProvider.__init__(self, name)
|
||||
|
||||
self.providerType = GenericProvider.TORRENT
|
||||
|
||||
|
||||
# self.option = {SEED_POLICY_TIME : '',
|
||||
# SEED_POLICY_RATIO: '',
|
||||
# 'PROCESS_METHOD': ''
|
||||
# }
|
||||
|
||||
|
||||
# def get_provider_options(self):
|
||||
# pass
|
||||
#
|
||||
|
|
|
@ -30,7 +30,6 @@ except ImportError:
|
|||
|
||||
|
||||
class HDBitsProvider(generic.TorrentProvider):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
generic.TorrentProvider.__init__(self, "HDBits")
|
||||
|
@ -49,7 +48,7 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||
|
||||
def _checkAuth(self):
|
||||
|
||||
if not sickbeard.HDBITS_USERNAME or not sickbeard.HDBITS_PASSKEY:
|
||||
if not sickbeard.HDBITS_USERNAME or not sickbeard.HDBITS_PASSKEY:
|
||||
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
|
||||
|
||||
return True
|
||||
|
@ -61,8 +60,10 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||
|
||||
if 'status' in parsedJSON and 'message' in parsedJSON:
|
||||
if parsedJSON.get('status') == 5:
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'], logger.DEBUG)
|
||||
raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.")
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'],
|
||||
logger.DEBUG)
|
||||
raise AuthException(
|
||||
"Your authentication credentials for " + self.name + " are incorrect, check your config.")
|
||||
|
||||
return True
|
||||
|
||||
|
@ -115,16 +116,19 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||
|
||||
if episode.show.air_by_date:
|
||||
if parse_result.air_date != episode.airdate:
|
||||
logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
|
||||
logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
|
||||
logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
|
||||
logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(
|
||||
episode.episode) + ", skipping it", logger.DEBUG)
|
||||
continue
|
||||
|
||||
quality = self.getQuality(item)
|
||||
|
||||
if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
|
||||
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
|
||||
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " +
|
||||
Quality.qualityStrings[quality], logger.DEBUG)
|
||||
continue
|
||||
|
||||
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
|
||||
|
@ -170,7 +174,6 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||
|
||||
|
||||
class HDBitsCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -206,7 +209,8 @@ class HDBitsCache(tvcache.TVCache):
|
|||
if parsedJSON and 'data' in parsedJSON:
|
||||
items = parsedJSON['data']
|
||||
else:
|
||||
logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", logger.ERROR)
|
||||
logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it",
|
||||
logger.ERROR)
|
||||
return []
|
||||
|
||||
cl = []
|
||||
|
@ -214,13 +218,14 @@ class HDBitsCache(tvcache.TVCache):
|
|||
ci = self._parseItem(item)
|
||||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
|
||||
if len(cl) > 0:
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
else:
|
||||
raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")
|
||||
raise exceptions.AuthException(
|
||||
"Your authentication info for " + self.provider.name + " is incorrect, check your config")
|
||||
|
||||
else:
|
||||
return []
|
||||
|
@ -236,10 +241,12 @@ class HDBitsCache(tvcache.TVCache):
|
|||
logger.log(u"Adding item to results: " + title, logger.DEBUG)
|
||||
return self._addCacheEntry(title, url)
|
||||
else:
|
||||
logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", logger.ERROR)
|
||||
logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
|
||||
logger.ERROR)
|
||||
return None
|
||||
|
||||
def _checkAuth(self, data):
|
||||
return self.provider._checkAuthFromData(data)
|
||||
|
||||
|
||||
provider = HDBitsProvider()
|
||||
|
|
|
@ -30,22 +30,22 @@ from sickbeard import db
|
|||
from sickbeard import classes
|
||||
from sickbeard import helpers
|
||||
from sickbeard import show_name_helpers
|
||||
from sickbeard.common import Overview
|
||||
from sickbeard.common import Overview
|
||||
from sickbeard.exceptions import ex
|
||||
from sickbeard import clients
|
||||
from lib import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from lib.unidecode import unidecode
|
||||
|
||||
class HDTorrentsProvider(generic.TorrentProvider):
|
||||
|
||||
urls = {'base_url' : 'https://hdts.ru/index.php',
|
||||
'login' : 'https://hdts.ru/login.php',
|
||||
'detail' : 'https://www.hdts.ru/details.php?id=%s',
|
||||
'search' : 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s',
|
||||
'download' : 'https://www.sceneaccess.eu/%s',
|
||||
'home' : 'https://www.hdts.ru/%s'
|
||||
}
|
||||
class HDTorrentsProvider(generic.TorrentProvider):
|
||||
urls = {'base_url': 'https://hdts.ru/index.php',
|
||||
'login': 'https://hdts.ru/login.php',
|
||||
'detail': 'https://www.hdts.ru/details.php?id=%s',
|
||||
'search': 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s',
|
||||
'download': 'https://www.sceneaccess.eu/%s',
|
||||
'home': 'https://www.hdts.ru/%s'
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
||||
|
@ -60,7 +60,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
self.categories = "&category[]=59&category[]=60&category[]=30&category[]=38"
|
||||
|
||||
self.session = requests.Session()
|
||||
|
||||
|
||||
self.cookies = None
|
||||
|
||||
def isEnabled(self):
|
||||
|
@ -72,42 +72,42 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
def getQuality(self, item):
|
||||
|
||||
quality = Quality.sceneQuality(item[0])
|
||||
return quality
|
||||
return quality
|
||||
|
||||
def _doLogin(self):
|
||||
|
||||
if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
|
||||
return True
|
||||
|
||||
|
||||
if sickbeard.HDTORRENTS_UID and sickbeard.HDTORRENTS_HASH:
|
||||
|
||||
|
||||
requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
|
||||
|
||||
else:
|
||||
|
||||
else:
|
||||
|
||||
login_params = {'uid': sickbeard.HDTORRENTS_USERNAME,
|
||||
'pwd': sickbeard.HDTORRENTS_PASSWORD,
|
||||
'submit': 'Confirm',
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
|
||||
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
if re.search('You need cookies enabled to log in.', response.text) \
|
||||
or response.status_code == 401:
|
||||
or response.status_code == 401:
|
||||
logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
sickbeard.HDTORRENTS_UID = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
|
||||
sickbeard.HDTORRENTS_HASH = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
|
||||
|
||||
|
||||
self.cookies = {'uid': sickbeard.HDTORRENTS_UID,
|
||||
'pass': sickbeard.HDTORRENTS_HASH
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return True
|
||||
|
||||
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
|
||||
|
@ -121,7 +121,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
if searchSeason:
|
||||
search_string = {'Season': [], 'Episode': []}
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
|
||||
ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX
|
||||
ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX
|
||||
search_string['Season'].append(ep_string)
|
||||
|
||||
for ep_obj in wantedEp:
|
||||
|
@ -141,16 +141,17 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
if ep_obj.show.air_by_date:
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
|
||||
str(ep_obj.airdate) +'|'+\
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
|
||||
str(ep_obj.airdate) + '|' + \
|
||||
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
|
||||
search_string['Episode'].append(ep_string)
|
||||
else:
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
|
||||
'episodenumber': ep_obj.episode}
|
||||
|
||||
search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
|
||||
search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
|
||||
|
||||
return [search_string]
|
||||
|
||||
|
@ -170,17 +171,17 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
if search_string == '':
|
||||
continue
|
||||
search_string = str(search_string).replace('.',' ')
|
||||
search_string = str(search_string).replace('.', ' ')
|
||||
searchURL = self.urls['search'] % (search_string, self.categories)
|
||||
|
||||
logger.log(u"Search string: " + searchURL, logger.DEBUG)
|
||||
|
||||
|
||||
data = self.getURL(searchURL)
|
||||
if not data:
|
||||
continue
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Remove HDTorrents NEW list
|
||||
split_data = data.partition('<!-- Show New Torrents After Last Visit -->\n\n\n\n')
|
||||
data = split_data[2]
|
||||
|
@ -189,10 +190,11 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
html = BeautifulSoup(data, features=["html5lib", "permissive"])
|
||||
|
||||
#Get first entry in table
|
||||
entries = html.find_all('td', attrs={'align' : 'center'})
|
||||
entries = html.find_all('td', attrs={'align': 'center'})
|
||||
|
||||
if not entries:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
try:
|
||||
|
@ -206,10 +208,10 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
continue
|
||||
|
||||
if mode != 'RSS' and seeders == 0:
|
||||
continue
|
||||
continue
|
||||
|
||||
if not title or not download_url:
|
||||
continue
|
||||
continue
|
||||
|
||||
item = title, download_url, id, seeders, leechers
|
||||
logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
|
||||
|
@ -217,12 +219,12 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
items[mode].append(item)
|
||||
|
||||
#Now attempt to get any others
|
||||
result_table = html.find('table', attrs = {'class' : 'mainblockcontenttt'})
|
||||
result_table = html.find('table', attrs={'class': 'mainblockcontenttt'})
|
||||
|
||||
if not result_table:
|
||||
continue
|
||||
|
||||
entries = result_table.find_all('td', attrs={'align' : 'center', 'class' : 'listas'})
|
||||
entries = result_table.find_all('td', attrs={'align': 'center', 'class': 'listas'})
|
||||
|
||||
if not entries:
|
||||
continue
|
||||
|
@ -232,7 +234,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
if not block2:
|
||||
continue
|
||||
cells = block2.find_all('td')
|
||||
|
||||
|
||||
try:
|
||||
title = cells[1].find('b').get_text().strip('\t ').replace('Blu-ray', 'bd50')
|
||||
url = self.urls['home'] % cells[4].find('a')['href']
|
||||
|
@ -245,7 +247,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
continue
|
||||
|
||||
if mode != 'RSS' and seeders == 0:
|
||||
continue
|
||||
continue
|
||||
|
||||
if not title or not download_url:
|
||||
continue
|
||||
|
@ -256,13 +258,13 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
items[mode].append(item)
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
|
||||
#For each search mode sort all the items by seeders
|
||||
items[mode].sort(key=lambda tup: tup[3], reverse=True)
|
||||
items[mode].sort(key=lambda tup: tup[3], reverse=True)
|
||||
|
||||
results += items[mode]
|
||||
|
||||
results += items[mode]
|
||||
|
||||
return results
|
||||
|
||||
def _get_title_and_url(self, item):
|
||||
|
@ -270,7 +272,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
title, url, id, seeders, leechers = item
|
||||
|
||||
if url:
|
||||
url = str(url).replace('&','&')
|
||||
url = str(url).replace('&', '&')
|
||||
|
||||
return (title, url)
|
||||
|
||||
|
@ -284,15 +286,16 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
try:
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR)
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
@ -301,12 +304,13 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
results = []
|
||||
|
||||
sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
sqlResults = db.DBConnection().select(
|
||||
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
if not sqlResults:
|
||||
return []
|
||||
|
||||
|
@ -323,7 +327,6 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
|
||||
class HDTorrentsCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -338,12 +341,12 @@ class HDTorrentsCache(tvcache.TVCache):
|
|||
|
||||
search_params = {'RSS': []}
|
||||
rss_results = self.provider._doSearch(search_params)
|
||||
|
||||
|
||||
if rss_results:
|
||||
self.setLastUpdate()
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
|
||||
self._clearCache()
|
||||
|
||||
|
@ -369,4 +372,5 @@ class HDTorrentsCache(tvcache.TVCache):
|
|||
|
||||
return self._addCacheEntry(title, url)
|
||||
|
||||
|
||||
provider = HDTorrentsProvider()
|
||||
|
|
|
@ -29,66 +29,66 @@ from sickbeard import db
|
|||
from sickbeard import classes
|
||||
from sickbeard import helpers
|
||||
from sickbeard import show_name_helpers
|
||||
from sickbeard.common import Overview
|
||||
from sickbeard.common import Overview
|
||||
from sickbeard.exceptions import ex
|
||||
from sickbeard import clients
|
||||
from lib import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from lib.unidecode import unidecode
|
||||
|
||||
class IPTorrentsProvider(generic.TorrentProvider):
|
||||
|
||||
urls = {'base_url' : 'https://www.iptorrents.com',
|
||||
'login' : 'https://www.iptorrents.com/torrents/',
|
||||
'search' : 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti',
|
||||
}
|
||||
class IPTorrentsProvider(generic.TorrentProvider):
|
||||
urls = {'base_url': 'https://www.iptorrents.com',
|
||||
'login': 'https://www.iptorrents.com/torrents/',
|
||||
'search': 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti',
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
||||
generic.TorrentProvider.__init__(self, "IPTorrents")
|
||||
|
||||
|
||||
self.supportsBacklog = True
|
||||
|
||||
self.cache = IPTorrentsCache(self)
|
||||
|
||||
|
||||
self.url = self.urls['base_url']
|
||||
|
||||
|
||||
self.session = None
|
||||
|
||||
self.categorie = 'l73=1&l78=1&l66=1&l65=1&l79=1&l5=1&l4=1'
|
||||
|
||||
def isEnabled(self):
|
||||
return sickbeard.IPTORRENTS
|
||||
|
||||
|
||||
def imageName(self):
|
||||
return 'iptorrents.png'
|
||||
|
||||
|
||||
def getQuality(self, item):
|
||||
|
||||
|
||||
quality = Quality.sceneQuality(item[0])
|
||||
return quality
|
||||
return quality
|
||||
|
||||
def _doLogin(self):
|
||||
|
||||
login_params = {'username': sickbeard.IPTORRENTS_USERNAME,
|
||||
'password': sickbeard.IPTORRENTS_PASSWORD,
|
||||
'login': 'submit',
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
self.session = requests.Session()
|
||||
|
||||
|
||||
try:
|
||||
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
if re.search('tries left', response.text) \
|
||||
or re.search('<title>IPT</title>', response.text) \
|
||||
or response.status_code == 401:
|
||||
logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR)
|
||||
or re.search('<title>IPT</title>', response.text) \
|
||||
or response.status_code == 401:
|
||||
logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
return True
|
||||
|
||||
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
|
||||
|
@ -102,7 +102,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
if searchSeason:
|
||||
search_string = {'Season': [], 'Episode': []}
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
|
||||
ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX
|
||||
ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX
|
||||
search_string['Season'].append(ep_string)
|
||||
|
||||
for ep_obj in wantedEp:
|
||||
|
@ -116,65 +116,67 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
def _get_episode_search_strings(self, ep_obj, add_string=''):
|
||||
|
||||
search_string = {'Episode': []}
|
||||
|
||||
|
||||
if not ep_obj:
|
||||
return []
|
||||
|
||||
|
||||
if ep_obj.show.air_by_date:
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
|
||||
str(ep_obj.airdate) +'|'+\
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
|
||||
str(ep_obj.airdate) + '|' + \
|
||||
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
|
||||
search_string['Episode'].append(ep_string)
|
||||
else:
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ' %s' %add_string
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
|
||||
'episodenumber': ep_obj.episode} + ' %s' % add_string
|
||||
|
||||
search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
|
||||
|
||||
|
||||
return [search_string]
|
||||
|
||||
def _doSearch(self, search_params):
|
||||
|
||||
|
||||
results = []
|
||||
items = {'Season': [], 'Episode': [], 'RSS': []}
|
||||
|
||||
freeleech = '&free=on' if sickbeard.IPTORRENTS_FREELEECH else ''
|
||||
|
||||
|
||||
if not self._doLogin():
|
||||
return []
|
||||
|
||||
return []
|
||||
|
||||
for mode in search_params.keys():
|
||||
for search_string in search_params[mode]:
|
||||
|
||||
# URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
|
||||
searchURL = self.urls['search'] % (self.categorie, freeleech, unidecode(search_string))
|
||||
searchURL += ';o=seeders' if mode != 'RSS' else ''
|
||||
|
||||
|
||||
logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
|
||||
|
||||
|
||||
data = self.getURL(searchURL)
|
||||
if not data:
|
||||
continue
|
||||
|
||||
|
||||
try:
|
||||
html = BeautifulSoup(data, features=["html5lib", "permissive"])
|
||||
|
||||
if not html:
|
||||
logger.log(u"Invalid HTML data: " + str(data) , logger.DEBUG)
|
||||
logger.log(u"Invalid HTML data: " + str(data), logger.DEBUG)
|
||||
continue
|
||||
|
||||
|
||||
if html.find(text='No Torrents Found!'):
|
||||
logger.log(u"No results found for: " + search_string + " (" + searchURL + ")", logger.DEBUG)
|
||||
continue
|
||||
|
||||
torrent_table = html.find('table', attrs = {'class' : 'torrents'})
|
||||
|
||||
torrent_table = html.find('table', attrs={'class': 'torrents'})
|
||||
torrents = torrent_table.find_all('tr') if torrent_table else []
|
||||
|
||||
#Continue only if one Release is found
|
||||
if len(torrents)<2:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING)
|
||||
if len(torrents) < 2:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
for result in torrents[1:]:
|
||||
|
@ -184,7 +186,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
torrent_name = torrent.string
|
||||
torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href']
|
||||
torrent_details_url = self.urls['base_url'] + torrent['href']
|
||||
torrent_seeders = int(result.find('td', attrs = {'class' : 'ac t_seeders'}).string)
|
||||
torrent_seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).string)
|
||||
## Not used, perhaps in the future ##
|
||||
#torrent_id = int(torrent['href'].replace('/details.php?id=', ''))
|
||||
#torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
|
||||
|
@ -194,7 +196,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
# Filter unseeded torrent and torrents with no name/url
|
||||
if mode != 'RSS' and torrent_seeders == 0:
|
||||
continue
|
||||
|
||||
|
||||
if not torrent_name or not torrent_download_url:
|
||||
continue
|
||||
|
||||
|
@ -203,18 +205,18 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
items[mode].append(item)
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
|
||||
results += items[mode]
|
||||
|
||||
results += items[mode]
|
||||
|
||||
return results
|
||||
|
||||
def _get_title_and_url(self, item):
|
||||
|
||||
|
||||
title, url = item
|
||||
|
||||
|
||||
if url:
|
||||
url = str(url).replace('&','&')
|
||||
url = str(url).replace('&', '&')
|
||||
|
||||
return (title, url)
|
||||
|
||||
|
@ -228,7 +230,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
try:
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
response = self.session.get(url)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
|
@ -236,7 +238,8 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
@ -245,12 +248,13 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
results = []
|
||||
|
||||
sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
sqlResults = db.DBConnection().select(
|
||||
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
if not sqlResults:
|
||||
return []
|
||||
|
||||
|
@ -267,7 +271,6 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
|
||||
class IPTorrentsCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -282,12 +285,12 @@ class IPTorrentsCache(tvcache.TVCache):
|
|||
|
||||
search_params = {'RSS': ['']}
|
||||
rss_results = self.provider._doSearch(search_params)
|
||||
|
||||
|
||||
if rss_results:
|
||||
self.setLastUpdate()
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
|
||||
self._clearCache()
|
||||
|
||||
|
@ -301,7 +304,7 @@ class IPTorrentsCache(tvcache.TVCache):
|
|||
if len(cl) > 0:
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
||||
def _parseItem(self, item):
|
||||
|
||||
(title, url) = item
|
||||
|
@ -311,6 +314,7 @@ class IPTorrentsCache(tvcache.TVCache):
|
|||
|
||||
logger.log(u"Adding item to cache: " + title, logger.DEBUG)
|
||||
|
||||
return self._addCacheEntry(title, url)
|
||||
return self._addCacheEntry(title, url)
|
||||
|
||||
|
||||
provider = IPTorrentsProvider()
|
||||
|
|
|
@ -45,8 +45,8 @@ from lib import requests
|
|||
from bs4 import BeautifulSoup
|
||||
from lib.unidecode import unidecode
|
||||
|
||||
class KATProvider(generic.TorrentProvider):
|
||||
|
||||
class KATProvider(generic.TorrentProvider):
|
||||
def __init__(self):
|
||||
|
||||
generic.TorrentProvider.__init__(self, "KickAssTorrents")
|
||||
|
@ -57,7 +57,7 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
self.url = 'http://kickass.to/'
|
||||
|
||||
self.searchurl = self.url+'usearch/%s/?field=seeders&sorder=desc' #order by seed
|
||||
self.searchurl = self.url + 'usearch/%s/?field=seeders&sorder=desc' #order by seed
|
||||
|
||||
def isEnabled(self):
|
||||
return sickbeard.KAT
|
||||
|
@ -95,12 +95,12 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
return quality_string
|
||||
|
||||
def _find_season_quality(self,title, torrent_link, ep_number):
|
||||
def _find_season_quality(self, title, torrent_link, ep_number):
|
||||
""" Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
|
||||
|
||||
mediaExtensions = ['avi', 'mkv', 'wmv', 'divx',
|
||||
'vob', 'dvr-ms', 'wtv', 'ts'
|
||||
'ogv', 'rar', 'zip', 'mp4']
|
||||
'ogv', 'rar', 'zip', 'mp4']
|
||||
|
||||
quality = Quality.UNKNOWN
|
||||
|
||||
|
@ -113,18 +113,21 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
try:
|
||||
soup = BeautifulSoup(data, features=["html5lib", "permissive"])
|
||||
file_table = soup.find('table', attrs = {'class': 'torrentFileList'})
|
||||
file_table = soup.find('table', attrs={'class': 'torrentFileList'})
|
||||
|
||||
if not file_table:
|
||||
return None
|
||||
|
||||
files = [x.text for x in file_table.find_all('td', attrs = {'class' : 'torFileName'} )]
|
||||
files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
|
||||
videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)
|
||||
|
||||
#Filtering SingleEpisode/MultiSeason Torrent
|
||||
if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1 ):
|
||||
logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG)
|
||||
logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG)
|
||||
if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
|
||||
logger.log(u"Result " + title + " have " + str(
|
||||
ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG)
|
||||
logger.log(
|
||||
u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
|
||||
logger.DEBUG)
|
||||
return None
|
||||
|
||||
if Quality.sceneQuality(title) != Quality.UNKNOWN:
|
||||
|
@ -134,7 +137,7 @@ class KATProvider(generic.TorrentProvider):
|
|||
quality = Quality.sceneQuality(os.path.basename(fileName))
|
||||
if quality != Quality.UNKNOWN: break
|
||||
|
||||
if fileName!=None and quality == Quality.UNKNOWN:
|
||||
if fileName != None and quality == Quality.UNKNOWN:
|
||||
quality = Quality.assumeQuality(os.path.basename(fileName))
|
||||
|
||||
if quality == Quality.UNKNOWN:
|
||||
|
@ -147,15 +150,16 @@ class KATProvider(generic.TorrentProvider):
|
|||
except InvalidNameException:
|
||||
return None
|
||||
|
||||
logger.log(u"Season quality for "+title+" is "+Quality.qualityStrings[quality], logger.DEBUG)
|
||||
logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)
|
||||
|
||||
if parse_result.series_name and parse_result.season_number:
|
||||
title = parse_result.series_name+' S%02d' % int(parse_result.season_number)+' '+self._reverseQuality(quality)
|
||||
title = parse_result.series_name + ' S%02d' % int(
|
||||
parse_result.season_number) + ' ' + self._reverseQuality(quality)
|
||||
|
||||
return title
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
|
||||
|
||||
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
|
||||
|
@ -169,10 +173,11 @@ class KATProvider(generic.TorrentProvider):
|
|||
if searchSeason:
|
||||
search_string = {'Season': [], 'Episode': []}
|
||||
for show_name in set(allPossibleShowNames(show)):
|
||||
ep_string = show_name +' S%02d' % int(season) + ' -S%02d' % int(season) + 'E' + ' category:tv' #1) ShowName SXX -SXXE
|
||||
ep_string = show_name + ' S%02d' % int(season) + ' -S%02d' % int(
|
||||
season) + 'E' + ' category:tv' #1) ShowName SXX -SXXE
|
||||
search_string['Season'].append(ep_string)
|
||||
|
||||
ep_string = show_name+' Season '+str(season)+' -Ep*' + ' category:tv' #2) ShowName Season X
|
||||
ep_string = show_name + ' Season ' + str(season) + ' -Ep*' + ' category:tv' #2) ShowName Season X
|
||||
search_string['Season'].append(ep_string)
|
||||
|
||||
for ep_obj in wantedEp:
|
||||
|
@ -184,7 +189,7 @@ class KATProvider(generic.TorrentProvider):
|
|||
return [search_string]
|
||||
|
||||
def _get_episode_search_strings(self, ep_obj, add_string=''):
|
||||
|
||||
|
||||
search_string = {'Episode': []}
|
||||
|
||||
if not ep_obj:
|
||||
|
@ -194,20 +199,22 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
if ep_obj.show.air_by_date:
|
||||
for show_name in set(allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = sanitizeSceneName(show_name) +' '+\
|
||||
str(ep_obj.airdate) +'|'+\
|
||||
ep_string = sanitizeSceneName(show_name) + ' ' + \
|
||||
str(ep_obj.airdate) + '|' + \
|
||||
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
|
||||
|
||||
search_string['Episode'].append(ep_string)
|
||||
else:
|
||||
for show_name in set(allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = sanitizeSceneName(show_name) +' '+\
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
|
||||
sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
|
||||
sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ' %s category:tv' %add_string \
|
||||
|
||||
ep_string = sanitizeSceneName(show_name) + ' ' + \
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
|
||||
'episodenumber': ep_obj.episode} + '|' + \
|
||||
sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season,
|
||||
'episodenumber': ep_obj.episode} + '|' + \
|
||||
sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season,
|
||||
'episodenumber': ep_obj.episode} + ' %s category:tv' % add_string
|
||||
search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
|
||||
|
||||
|
||||
return [search_string]
|
||||
|
||||
|
||||
|
@ -218,14 +225,14 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
for mode in search_params.keys():
|
||||
for search_string in search_params[mode]:
|
||||
|
||||
|
||||
if mode != 'RSS':
|
||||
searchURL = self.searchurl %(urllib.quote(unidecode(search_string)))
|
||||
searchURL = self.searchurl % (urllib.quote(unidecode(search_string)))
|
||||
logger.log(u"Search string: " + searchURL, logger.DEBUG)
|
||||
else:
|
||||
searchURL = self.url + 'tv/?field=time_add&sorder=desc'
|
||||
logger.log(u"KAT cache update URL: "+ searchURL, logger.DEBUG)
|
||||
|
||||
logger.log(u"KAT cache update URL: " + searchURL, logger.DEBUG)
|
||||
|
||||
html = self.getURL(searchURL)
|
||||
if not html:
|
||||
continue
|
||||
|
@ -233,14 +240,15 @@ class KATProvider(generic.TorrentProvider):
|
|||
try:
|
||||
soup = BeautifulSoup(html, features=["html5lib", "permissive"])
|
||||
|
||||
torrent_table = soup.find('table', attrs = {'class' : 'data'})
|
||||
torrent_table = soup.find('table', attrs={'class': 'data'})
|
||||
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
|
||||
|
||||
#Continue only if one Release is found
|
||||
if len(torrent_rows)<2:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING)
|
||||
if len(torrent_rows) < 2:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
|
||||
for tr in torrent_rows[1:]:
|
||||
|
||||
try:
|
||||
|
@ -249,17 +257,19 @@ class KATProvider(generic.TorrentProvider):
|
|||
title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text
|
||||
url = tr.find('a', 'imagnet')['href']
|
||||
verified = True if tr.find('a', 'iverify') else False
|
||||
trusted = True if tr.find('img', {'alt': 'verified'}) else False
|
||||
trusted = True if tr.find('img', {'alt': 'verified'}) else False
|
||||
seeders = int(tr.find_all('td')[-2].text)
|
||||
leechers = int(tr.find_all('td')[-1].text)
|
||||
except (AttributeError, TypeError):
|
||||
continue
|
||||
|
||||
if mode != 'RSS' and seeders == 0:
|
||||
continue
|
||||
|
||||
continue
|
||||
|
||||
if sickbeard.KAT_VERIFIED and not verified:
|
||||
logger.log(u"KAT Provider found result "+title+" but that doesn't seem like a verified result so I'm ignoring it",logger.DEBUG)
|
||||
logger.log(
|
||||
u"KAT Provider found result " + title + " but that doesn't seem like a verified result so I'm ignoring it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
#Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent
|
||||
|
@ -275,21 +285,22 @@ class KATProvider(generic.TorrentProvider):
|
|||
items[mode].append(item)
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
|
||||
#For each search mode sort all the items by seeders
|
||||
items[mode].sort(key=lambda tup: tup[3], reverse=True)
|
||||
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
|
||||
logger.ERROR)
|
||||
|
||||
#For each search mode sort all the items by seeders
|
||||
items[mode].sort(key=lambda tup: tup[3], reverse=True)
|
||||
|
||||
results += items[mode]
|
||||
|
||||
results += items[mode]
|
||||
|
||||
return results
|
||||
|
||||
def _get_title_and_url(self, item):
|
||||
|
||||
|
||||
title, url, id, seeders, leechers = item
|
||||
|
||||
|
||||
if url:
|
||||
url = url.replace('&','&')
|
||||
url = url.replace('&', '&')
|
||||
|
||||
return (title, url)
|
||||
|
||||
|
@ -298,53 +309,55 @@ class KATProvider(generic.TorrentProvider):
|
|||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
r = requests.get(url)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
|
||||
if r.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
|
||||
return r.content
|
||||
|
||||
def downloadResult(self, result):
|
||||
"""
|
||||
Save the result to disk.
|
||||
"""
|
||||
|
||||
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
|
||||
|
||||
|
||||
if not torrent_hash:
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
try:
|
||||
r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent')
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
if not r.status_code == 200:
|
||||
return False
|
||||
|
||||
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
|
||||
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
|
||||
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
magnetFileContent = r.content
|
||||
|
||||
try:
|
||||
try:
|
||||
with open(magnetFileName, 'wb') as fileOut:
|
||||
fileOut.write(magnetFileContent)
|
||||
|
||||
|
||||
helpers.chmodAsParent(magnetFileName)
|
||||
|
||||
|
||||
except EnvironmentError, e:
|
||||
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
|
||||
return True
|
||||
|
||||
|
@ -353,15 +366,16 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
results = []
|
||||
|
||||
sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
sqlResults = db.DBConnection().select(
|
||||
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
if not sqlResults:
|
||||
return []
|
||||
|
||||
|
||||
for sqlShow in sqlResults:
|
||||
curShow = helpers.findCertainShow(sickbeard.showList, int(sqlShow["showid"]))
|
||||
curEp = curShow.getEpisode(int(sqlShow["season"]), int(sqlShow["episode"]))
|
||||
|
@ -375,7 +389,6 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
|
||||
class KATCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -390,12 +403,12 @@ class KATCache(tvcache.TVCache):
|
|||
|
||||
search_params = {'RSS': ['rss']}
|
||||
rss_results = self.provider._doSearch(search_params)
|
||||
|
||||
|
||||
if rss_results:
|
||||
self.setLastUpdate()
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
|
||||
self._clearCache()
|
||||
|
||||
|
@ -420,5 +433,6 @@ class KATCache(tvcache.TVCache):
|
|||
logger.log(u"Adding item to cache: " + title, logger.DEBUG)
|
||||
|
||||
return self._addCacheEntry(title, url)
|
||||
|
||||
|
||||
|
||||
provider = KATProvider()
|
||||
|
|
|
@ -20,7 +20,7 @@ import os
|
|||
import re
|
||||
import sys
|
||||
import time
|
||||
import urllib,urlparse
|
||||
import urllib, urlparse
|
||||
|
||||
from xml.dom.minidom import parseString
|
||||
from datetime import datetime, timedelta
|
||||
|
@ -35,8 +35,8 @@ from sickbeard.common import Quality
|
|||
from sickbeard.exceptions import ex
|
||||
from lib.dateutil.parser import parse as parseDate
|
||||
|
||||
class NewzbinDownloader(urllib.FancyURLopener):
|
||||
|
||||
class NewzbinDownloader(urllib.FancyURLopener):
|
||||
def __init__(self):
|
||||
urllib.FancyURLopener.__init__(self)
|
||||
|
||||
|
@ -63,8 +63,8 @@ class NewzbinDownloader(urllib.FancyURLopener):
|
|||
|
||||
raise exceptions.NewzbinAPIThrottled()
|
||||
|
||||
class NewzbinProvider(generic.NZBProvider):
|
||||
|
||||
class NewzbinProvider(generic.NZBProvider):
|
||||
def __init__(self):
|
||||
|
||||
generic.NZBProvider.__init__(self, "Newzbin")
|
||||
|
@ -92,7 +92,7 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
else:
|
||||
attr_dict[cur_attr].append(cur_attr_value)
|
||||
|
||||
logger.log("Finding quality of item based on attributes "+str(attr_dict), logger.DEBUG)
|
||||
logger.log("Finding quality of item based on attributes " + str(attr_dict), logger.DEBUG)
|
||||
|
||||
if self._is_SDTV(attr_dict):
|
||||
quality = Quality.SDTV
|
||||
|
@ -109,17 +109,18 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
else:
|
||||
quality = Quality.UNKNOWN
|
||||
|
||||
logger.log("Resulting quality: "+str(quality), logger.DEBUG)
|
||||
logger.log("Resulting quality: " + str(quality), logger.DEBUG)
|
||||
|
||||
return quality
|
||||
|
||||
def _is_SDTV(self, attrs):
|
||||
|
||||
# Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
|
||||
video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('720p' not in attrs['Video Fmt']) \
|
||||
and ('1080p' not in attrs['Video Fmt']) \
|
||||
and ('1080i' not in attrs['Video Fmt'])
|
||||
video_fmt = 'Video Fmt' in attrs and (
|
||||
'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('720p' not in attrs['Video Fmt']) \
|
||||
and ('1080p' not in attrs['Video Fmt']) \
|
||||
and ('1080i' not in attrs['Video Fmt'])
|
||||
|
||||
# Source: TV Cap or HDTV or (None)
|
||||
source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
|
||||
|
@ -132,11 +133,12 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
def _is_SDDVD(self, attrs):
|
||||
|
||||
# Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
|
||||
video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('720p' not in attrs['Video Fmt']) \
|
||||
and ('1080p' not in attrs['Video Fmt']) \
|
||||
and ('1080i' not in attrs['Video Fmt'])
|
||||
|
||||
video_fmt = 'Video Fmt' in attrs and (
|
||||
'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('720p' not in attrs['Video Fmt']) \
|
||||
and ('1080p' not in attrs['Video Fmt']) \
|
||||
and ('1080i' not in attrs['Video Fmt'])
|
||||
|
||||
# Source: DVD
|
||||
source = 'Source' in attrs and 'DVD' in attrs['Source']
|
||||
|
||||
|
@ -148,7 +150,7 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
def _is_HDTV(self, attrs):
|
||||
# Video Fmt: H.264/x264, 720p
|
||||
video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('720p' in attrs['Video Fmt'])
|
||||
and ('720p' in attrs['Video Fmt'])
|
||||
|
||||
# Source: TV Cap or HDTV or (None)
|
||||
source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
|
||||
|
@ -162,7 +164,7 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
|
||||
# Video Fmt: H.264/x264, 720p
|
||||
video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('720p' in attrs['Video Fmt'])
|
||||
and ('720p' in attrs['Video Fmt'])
|
||||
|
||||
# Source: WEB-DL
|
||||
source = 'Source' in attrs and 'WEB-DL' in attrs['Source']
|
||||
|
@ -176,7 +178,7 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
|
||||
# Video Fmt: H.264/x264, 720p
|
||||
video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('720p' in attrs['Video Fmt'])
|
||||
and ('720p' in attrs['Video Fmt'])
|
||||
|
||||
# Source: Blu-ray or HD-DVD
|
||||
source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
|
||||
|
@ -187,7 +189,7 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
|
||||
# Video Fmt: H.264/x264, 1080p
|
||||
video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
|
||||
and ('1080p' in attrs['Video Fmt'])
|
||||
and ('1080p' in attrs['Video Fmt'])
|
||||
|
||||
# Source: Blu-ray or HD-DVD
|
||||
source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
|
||||
|
@ -207,19 +209,20 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
|
||||
id = self.getIDFromURL(nzb.url)
|
||||
if not id:
|
||||
logger.log("Unable to get an ID from "+str(nzb.url)+", can't download from Newzbin's API", logger.ERROR)
|
||||
logger.log("Unable to get an ID from " + str(nzb.url) + ", can't download from Newzbin's API", logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log("Downloading an NZB from newzbin with id "+id)
|
||||
logger.log("Downloading an NZB from newzbin with id " + id)
|
||||
|
||||
fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name)+'.nzb')
|
||||
fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name) + '.nzb')
|
||||
logger.log("Saving to " + fileName)
|
||||
|
||||
urllib._urlopener = NewzbinDownloader()
|
||||
|
||||
params = urllib.urlencode({"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id})
|
||||
params = urllib.urlencode(
|
||||
{"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id})
|
||||
try:
|
||||
urllib.urlretrieve(self.url+"api/dnzb/", fileName, data=params)
|
||||
urllib.urlretrieve(self.url + "api/dnzb/", fileName, data=params)
|
||||
except exceptions.NewzbinAPIThrottled:
|
||||
logger.log("Done waiting for Newzbin API throttle limit, starting downloads again")
|
||||
self.downloadResult(nzb)
|
||||
|
@ -235,7 +238,7 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
f = myOpener.openit(url)
|
||||
|
@ -256,36 +259,36 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
suffix = ''
|
||||
else:
|
||||
suffix = 'x'
|
||||
searchTerms = ['^"'+x+' - '+str(season)+suffix+'"' for x in nameList]
|
||||
searchTerms = ['^"' + x + ' - ' + str(season) + suffix + '"' for x in nameList]
|
||||
#searchTerms += ['^"'+x+' - Season '+str(season)+'"' for x in nameList]
|
||||
searchStr = " OR ".join(searchTerms)
|
||||
|
||||
searchStr += " -subpack -extras"
|
||||
|
||||
logger.log("Searching newzbin for string "+searchStr, logger.DEBUG)
|
||||
|
||||
logger.log("Searching newzbin for string " + searchStr, logger.DEBUG)
|
||||
|
||||
return [searchStr]
|
||||
|
||||
def _get_episode_search_strings(self, ep_obj):
|
||||
|
||||
nameList = set(show_name_helpers.allPossibleShowNames(ep_obj.show))
|
||||
if not ep_obj.show.air_by_date:
|
||||
searchStr = " OR ".join(['^"'+x+' - %dx%02d"'%(ep_obj.season, ep_obj.episode) for x in nameList])
|
||||
searchStr = " OR ".join(['^"' + x + ' - %dx%02d"' % (ep_obj.season, ep_obj.episode) for x in nameList])
|
||||
else:
|
||||
searchStr = " OR ".join(['^"'+x+' - '+str(ep_obj.airdate)+'"' for x in nameList])
|
||||
searchStr = " OR ".join(['^"' + x + ' - ' + str(ep_obj.airdate) + '"' for x in nameList])
|
||||
return [searchStr]
|
||||
|
||||
def _doSearch(self, searchStr, show=None):
|
||||
|
||||
data = self._getRSSData(searchStr.encode('utf-8'))
|
||||
|
||||
|
||||
item_list = []
|
||||
|
||||
try:
|
||||
parsedXML = parseString(data)
|
||||
items = parsedXML.getElementsByTagName('item')
|
||||
except Exception, e:
|
||||
logger.log("Error trying to load Newzbin RSS feed: "+ex(e), logger.ERROR)
|
||||
logger.log("Error trying to load Newzbin RSS feed: " + ex(e), logger.ERROR)
|
||||
return []
|
||||
|
||||
for cur_item in items:
|
||||
|
@ -301,7 +304,7 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
post_date = parseDate(dateString).replace(tzinfo=None)
|
||||
retention_date = datetime.now() - timedelta(days=sickbeard.USENET_RETENTION)
|
||||
if post_date < retention_date:
|
||||
logger.log(u"Date "+str(post_date)+" is out of retention range, skipping", logger.DEBUG)
|
||||
logger.log(u"Date " + str(post_date) + " is out of retention range, skipping", logger.DEBUG)
|
||||
continue
|
||||
except Exception, e:
|
||||
logger.log("Error parsing date from Newzbin RSS feed: " + str(e), logger.ERROR)
|
||||
|
@ -315,21 +318,21 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
def _getRSSData(self, search=None):
|
||||
|
||||
params = {
|
||||
'searchaction': 'Search',
|
||||
'fpn': 'p',
|
||||
'category': 8,
|
||||
'u_nfo_posts_only': 0,
|
||||
'u_url_posts_only': 0,
|
||||
'u_comment_posts_only': 0,
|
||||
'u_show_passworded': 0,
|
||||
'u_v3_retention': 0,
|
||||
'ps_rb_video_format': 3082257,
|
||||
'ps_rb_language': 4096,
|
||||
'sort': 'date',
|
||||
'order': 'desc',
|
||||
'u_post_results_amt': 50,
|
||||
'feed': 'rss',
|
||||
'hauth': 1,
|
||||
'searchaction': 'Search',
|
||||
'fpn': 'p',
|
||||
'category': 8,
|
||||
'u_nfo_posts_only': 0,
|
||||
'u_url_posts_only': 0,
|
||||
'u_comment_posts_only': 0,
|
||||
'u_show_passworded': 0,
|
||||
'u_v3_retention': 0,
|
||||
'ps_rb_video_format': 3082257,
|
||||
'ps_rb_language': 4096,
|
||||
'sort': 'date',
|
||||
'order': 'desc',
|
||||
'u_post_results_amt': 50,
|
||||
'feed': 'rss',
|
||||
'hauth': 1,
|
||||
}
|
||||
|
||||
if search:
|
||||
|
@ -350,8 +353,8 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
if sickbeard.NEWZBIN_USERNAME in (None, "") or sickbeard.NEWZBIN_PASSWORD in (None, ""):
|
||||
raise exceptions.AuthException("Newzbin authentication details are empty, check your config")
|
||||
|
||||
class NewzbinCache(tvcache.TVCache):
|
||||
|
||||
class NewzbinCache(tvcache.TVCache):
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -374,14 +377,16 @@ class NewzbinCache(tvcache.TVCache):
|
|||
raise exceptions.AuthException("Invalid Newzbin username/password")
|
||||
|
||||
if not title or not url:
|
||||
logger.log("The XML returned from the "+self.provider.name+" feed is incomplete, this result is unusable", logger.ERROR)
|
||||
logger.log(
|
||||
"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
|
||||
logger.ERROR)
|
||||
return
|
||||
|
||||
quality = self.provider.getQuality(item)
|
||||
|
||||
logger.log("Found quality "+str(quality), logger.DEBUG)
|
||||
logger.log("Found quality " + str(quality), logger.DEBUG)
|
||||
|
||||
logger.log("Adding item from RSS to cache: "+title, logger.DEBUG)
|
||||
logger.log("Adding item from RSS to cache: " + title, logger.DEBUG)
|
||||
|
||||
self._addCacheEntry(title, url, quality=quality)
|
||||
|
||||
|
|
|
@ -42,7 +42,6 @@ from sickbeard.exceptions import ex, AuthException
|
|||
|
||||
|
||||
class NewznabProvider(generic.NZBProvider):
|
||||
|
||||
def __init__(self, name, url, key='', catIDs='5030,5040,5060'):
|
||||
|
||||
generic.NZBProvider.__init__(self, name)
|
||||
|
@ -73,7 +72,8 @@ class NewznabProvider(generic.NZBProvider):
|
|||
return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str(int(self.enabled))
|
||||
|
||||
def imageName(self):
|
||||
if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
|
||||
if ek.ek(os.path.isfile,
|
||||
ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
|
||||
return self.getID() + '.png'
|
||||
return 'newznab.png'
|
||||
|
||||
|
@ -155,7 +155,8 @@ class NewznabProvider(generic.NZBProvider):
|
|||
def _checkAuth(self):
|
||||
|
||||
if self.needs_auth and not self.key:
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing", logger.DEBUG)
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing",
|
||||
logger.DEBUG)
|
||||
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
|
||||
|
||||
return True
|
||||
|
@ -173,9 +174,11 @@ class NewznabProvider(generic.NZBProvider):
|
|||
elif code == '101':
|
||||
raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.")
|
||||
elif code == '102':
|
||||
raise AuthException("Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
|
||||
raise AuthException(
|
||||
"Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
|
||||
else:
|
||||
logger.log(u"Unknown error given from " + self.name + ": " + parsedXML.attrib['description'], logger.ERROR)
|
||||
logger.log(u"Unknown error given from " + self.name + ": " + parsedXML.attrib['description'],
|
||||
logger.ERROR)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -237,7 +240,9 @@ class NewznabProvider(generic.NZBProvider):
|
|||
logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG)
|
||||
results.append(curItem)
|
||||
else:
|
||||
logger.log(u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", logger.DEBUG)
|
||||
logger.log(
|
||||
u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable",
|
||||
logger.DEBUG)
|
||||
|
||||
return results
|
||||
|
||||
|
@ -248,7 +253,8 @@ class NewznabProvider(generic.NZBProvider):
|
|||
search_terms = ['.proper.', '.repack.']
|
||||
|
||||
cache_results = self.cache.listPropers(search_date)
|
||||
results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in cache_results]
|
||||
results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in
|
||||
cache_results]
|
||||
|
||||
for term in search_terms:
|
||||
for item in self._doSearch({'q': term}, max_age=4):
|
||||
|
@ -260,7 +266,8 @@ class NewznabProvider(generic.NZBProvider):
|
|||
|
||||
try:
|
||||
# we could probably do dateStr = descriptionStr but we want date in this format
|
||||
date_text = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', description_text).group(1)
|
||||
date_text = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}',
|
||||
description_text).group(1)
|
||||
except:
|
||||
date_text = None
|
||||
|
||||
|
@ -281,7 +288,6 @@ class NewznabProvider(generic.NZBProvider):
|
|||
|
||||
|
||||
class NewznabCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -314,4 +320,4 @@ class NewznabCache(tvcache.TVCache):
|
|||
return data
|
||||
|
||||
def _checkAuth(self, parsedXML):
|
||||
return self.provider._checkAuthFromData(parsedXML)
|
||||
return self.provider._checkAuthFromData(parsedXML)
|
||||
|
|
|
@ -78,7 +78,7 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
return {
|
||||
'username': sickbeard.NEXTGEN_USERNAME,
|
||||
'password': sickbeard.NEXTGEN_PASSWORD,
|
||||
}
|
||||
}
|
||||
|
||||
def loginSuccess(self, output):
|
||||
if "<title>NextGen - Login</title>" in output:
|
||||
|
@ -107,12 +107,13 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
try:
|
||||
login_params = self.getLoginParams()
|
||||
self.session = requests.Session()
|
||||
self.session.headers.update({'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0)'})
|
||||
self.session.headers.update(
|
||||
{'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0)'})
|
||||
data = self.session.get(self.urls['login_page'])
|
||||
bs = BeautifulSoup(data.content.decode('iso-8859-1'))
|
||||
csrfraw = bs.find('form', attrs = {'id': 'login'})['action']
|
||||
output = self.session.post(self.urls['base_url']+csrfraw, data=login_params)
|
||||
|
||||
csrfraw = bs.find('form', attrs={'id': 'login'})['action']
|
||||
output = self.session.post(self.urls['base_url'] + csrfraw, data=login_params)
|
||||
|
||||
if self.loginSuccess(output):
|
||||
self.last_login_check = now
|
||||
self.login_opener = self.session
|
||||
|
@ -138,7 +139,7 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
if searchSeason:
|
||||
search_string = {'Season': [], 'Episode': []}
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
|
||||
ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX
|
||||
ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX
|
||||
search_string['Season'].append(ep_string)
|
||||
|
||||
for ep_obj in wantedEp:
|
||||
|
@ -158,8 +159,8 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
|
||||
if ep_obj.show.air_by_date:
|
||||
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
|
||||
str(ep_obj.airdate) +'|'+\
|
||||
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
|
||||
str(ep_obj.airdate) + '|' + \
|
||||
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
|
||||
search_string['Episode'].append(ep_string)
|
||||
else:
|
||||
|
@ -193,16 +194,17 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
|
||||
try:
|
||||
html = BeautifulSoup(data.decode('iso-8859-1'), features=["html5lib", "permissive"])
|
||||
resultsTable = html.find('div', attrs = {'id' : 'torrent-table-wrapper'})
|
||||
resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'})
|
||||
|
||||
if not resultsTable:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
# Collecting entries
|
||||
entries_std = html.find_all('div' , attrs = {'id' : 'torrent-std'})
|
||||
entries_sticky = html.find_all('div' , attrs = {'id' : 'torrent-sticky'})
|
||||
|
||||
entries_std = html.find_all('div', attrs={'id': 'torrent-std'})
|
||||
entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'})
|
||||
|
||||
entries = entries_std + entries_sticky
|
||||
|
||||
#Xirg STANDARD TORRENTS
|
||||
|
@ -210,10 +212,13 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
if len(entries) > 0:
|
||||
|
||||
for result in entries:
|
||||
|
||||
|
||||
try:
|
||||
torrentName = ((result.find('div', attrs = {'id' :'torrent-udgivelse2-users'})).find('a'))['title']
|
||||
torrentId = (((result.find('div', attrs = {'id' :'torrent-download'})).find('a'))['href']).replace('download.php?id=','')
|
||||
torrentName = \
|
||||
((result.find('div', attrs={'id': 'torrent-udgivelse2-users'})).find('a'))['title']
|
||||
torrentId = (
|
||||
((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace(
|
||||
'download.php?id=', '')
|
||||
torrent_name = str(torrentName)
|
||||
torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
|
||||
torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8')
|
||||
|
@ -223,25 +228,28 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
#torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
|
||||
except (AttributeError, TypeError):
|
||||
continue
|
||||
|
||||
|
||||
# Filter unseeded torrent and torrents with no name/url
|
||||
#if mode != 'RSS' and torrent_seeders == 0:
|
||||
# continue
|
||||
|
||||
|
||||
if not torrent_name or not torrent_download_url:
|
||||
continue
|
||||
|
||||
|
||||
item = torrent_name, torrent_download_url
|
||||
logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG)
|
||||
logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")",
|
||||
logger.DEBUG)
|
||||
items[mode].append(item)
|
||||
|
||||
|
||||
else:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING)
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
|
||||
logger.ERROR)
|
||||
|
||||
results += items[mode]
|
||||
|
||||
|
@ -267,7 +275,7 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url)
|
||||
|
@ -276,7 +284,8 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
@ -285,12 +294,13 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
|
||||
results = []
|
||||
|
||||
sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
sqlResults = db.DBConnection().select(
|
||||
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
if not sqlResults:
|
||||
return []
|
||||
|
||||
|
|
|
@ -36,14 +36,14 @@ from sickbeard import tvcache
|
|||
|
||||
REMOTE_DBG = False
|
||||
|
||||
class NyaaProvider(generic.TorrentProvider):
|
||||
|
||||
class NyaaProvider(generic.TorrentProvider):
|
||||
def __init__(self):
|
||||
|
||||
generic.TorrentProvider.__init__(self, "NyaaTorrents")
|
||||
|
||||
|
||||
self.supportsBacklog = True
|
||||
|
||||
|
||||
self.supportsAbsoluteNumbering = True
|
||||
|
||||
self.cache = NyaaCache(self)
|
||||
|
@ -52,22 +52,23 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
|
||||
def isEnabled(self):
|
||||
return sickbeard.NYAA
|
||||
|
||||
|
||||
def imageName(self):
|
||||
return 'nyaatorrents.png'
|
||||
|
||||
|
||||
def getQuality(self, item, anime=False):
|
||||
self.debug()
|
||||
title = helpers.get_xml_text(item.getElementsByTagName('title')[0]).replace("/"," ")
|
||||
title = helpers.get_xml_text(item.getElementsByTagName('title')[0]).replace("/", " ")
|
||||
quality = Quality.sceneQuality(title, anime)
|
||||
return quality
|
||||
|
||||
return quality
|
||||
|
||||
def findSeasonResults(self, show, season):
|
||||
results = {}
|
||||
|
||||
|
||||
results = generic.TorrentProvider.findSeasonResults(self, show, season)
|
||||
|
||||
|
||||
return results
|
||||
|
||||
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
|
||||
names = []
|
||||
names.extend(show_name_helpers.makeSceneShowSearchStrings(show))
|
||||
|
@ -77,12 +78,12 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
return self._get_season_search_strings(ep_obj.show, ep_obj.season)
|
||||
|
||||
def _doSearch(self, search_string, show=None):
|
||||
|
||||
params = {"term" : search_string.encode('utf-8'),
|
||||
"sort" : '2', #Sort Descending By Seeders
|
||||
}
|
||||
|
||||
searchURL = self.url+'?page=rss&'+urllib.urlencode(params)
|
||||
|
||||
params = {"term": search_string.encode('utf-8'),
|
||||
"sort": '2', #Sort Descending By Seeders
|
||||
}
|
||||
|
||||
searchURL = self.url + '?page=rss&' + urllib.urlencode(params)
|
||||
|
||||
logger.log(u"Search string: " + searchURL, logger.DEBUG)
|
||||
|
||||
|
@ -90,42 +91,44 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
|
||||
if not data:
|
||||
return []
|
||||
|
||||
|
||||
try:
|
||||
parsedXML = parseString(data)
|
||||
items = parsedXML.getElementsByTagName('item')
|
||||
except Exception, e:
|
||||
logger.log(u"Error trying to load NyaaTorrents RSS feed: "+ex(e), logger.ERROR)
|
||||
logger.log(u"RSS data: "+data, logger.DEBUG)
|
||||
logger.log(u"Error trying to load NyaaTorrents RSS feed: " + ex(e), logger.ERROR)
|
||||
logger.log(u"RSS data: " + data, logger.DEBUG)
|
||||
return []
|
||||
|
||||
|
||||
results = []
|
||||
|
||||
for curItem in items:
|
||||
|
||||
|
||||
(title, url) = self._get_title_and_url(curItem)
|
||||
|
||||
|
||||
if not title or not url:
|
||||
logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable: "+data, logger.ERROR)
|
||||
logger.log(
|
||||
u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable: " + data,
|
||||
logger.ERROR)
|
||||
continue
|
||||
|
||||
|
||||
results.append(curItem)
|
||||
|
||||
|
||||
return results
|
||||
|
||||
def _get_title_and_url(self, item):
|
||||
|
||||
return generic.TorrentProvider._get_title_and_url(self, item)
|
||||
|
||||
def findEpisode (self, episode, manualSearch=False):
|
||||
def findEpisode(self, episode, manualSearch=False):
|
||||
|
||||
self._checkAuth()
|
||||
|
||||
logger.log(u"Searching "+self.name+" for " + episode.prettyName())
|
||||
logger.log(u"Searching " + self.name + " for " + episode.prettyName())
|
||||
|
||||
self.cache.updateCache()
|
||||
results = self.cache.searchCache(episode, manualSearch)
|
||||
logger.log(u"Cache results: "+str(results), logger.DEBUG)
|
||||
logger.log(u"Cache results: " + str(results), logger.DEBUG)
|
||||
|
||||
# if we got some results then use them no matter what.
|
||||
# OR
|
||||
|
@ -147,25 +150,31 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
myParser = NameParser(show=episode.show)
|
||||
parse_result = myParser.parse(title)
|
||||
except InvalidNameException:
|
||||
logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
|
||||
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
|
||||
continue
|
||||
|
||||
if episode.show.air_by_date:
|
||||
if parse_result.air_date != episode.airdate:
|
||||
logger.log("Episode "+title+" didn't air on "+str(episode.airdate)+", skipping it", logger.DEBUG)
|
||||
logger.log("Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
elif episode.show.anime and episode.show.absolute_numbering:
|
||||
if episode.absolute_number not in parse_result.ab_episode_numbers:
|
||||
logger.log("Episode "+title+" isn't "+str(episode.absolute_number)+", skipping it", logger.DEBUG)
|
||||
logger.log("Episode " + title + " isn't " + str(episode.absolute_number) + ", skipping it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
|
||||
logger.log("Episode "+title+" isn't "+str(episode.season)+"x"+str(episode.episode)+", skipping it", logger.DEBUG)
|
||||
logger.log(
|
||||
"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
quality = self.getQuality(item, episode.show.anime)
|
||||
|
||||
if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
|
||||
logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG)
|
||||
logger.log(
|
||||
u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
|
||||
quality], logger.DEBUG)
|
||||
continue
|
||||
|
||||
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
|
||||
|
@ -181,17 +190,15 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
|
||||
def _extract_name_from_filename(self, filename):
|
||||
name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
|
||||
logger.log(u"Comparing "+name_regex+" against "+filename, logger.DEBUG)
|
||||
logger.log(u"Comparing " + name_regex + " against " + filename, logger.DEBUG)
|
||||
match = re.match(name_regex, filename, re.I)
|
||||
if match:
|
||||
return match.group(1)
|
||||
return None
|
||||
|
||||
|
||||
|
||||
class NyaaCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
||||
# only poll NyaaTorrents every 15 minutes max
|
||||
|
@ -199,30 +206,30 @@ class NyaaCache(tvcache.TVCache):
|
|||
|
||||
|
||||
def _getRSSData(self):
|
||||
|
||||
params = {
|
||||
"page" : 'rss', # Use RSS page
|
||||
"order" : '1' #Sort Descending By Date
|
||||
}
|
||||
|
||||
url = self.provider.url + '?' + urllib.urlencode(params)
|
||||
"page": 'rss', # Use RSS page
|
||||
"order": '1' #Sort Descending By Date
|
||||
}
|
||||
|
||||
logger.log(u"NyaaTorrents cache update URL: "+ url, logger.DEBUG)
|
||||
url = self.provider.url + '?' + urllib.urlencode(params)
|
||||
|
||||
logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
|
||||
|
||||
data = self.provider.getURL(url)
|
||||
|
||||
return data
|
||||
|
||||
def _parseItem(self, item):
|
||||
|
||||
(title, url) = self.provider._get_title_and_url(item)
|
||||
|
||||
if not title or not url:
|
||||
logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable", logger.ERROR)
|
||||
logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable",
|
||||
logger.ERROR)
|
||||
return None
|
||||
|
||||
logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG)
|
||||
logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
|
||||
|
||||
return self._addCacheEntry(title, url)
|
||||
|
||||
|
||||
provider = NyaaProvider()
|
|
@ -34,131 +34,135 @@ from sickbeard import exceptions, logger
|
|||
from sickbeard import tvcache
|
||||
from sickbeard.exceptions import ex
|
||||
|
||||
|
||||
class NZBsProvider(generic.NZBProvider):
|
||||
def __init__(self):
|
||||
|
||||
def __init__(self):
|
||||
generic.NZBProvider.__init__(self, "NZBs.org Old")
|
||||
|
||||
generic.NZBProvider.__init__(self, "NZBs.org Old")
|
||||
self.supportsBacklog = True
|
||||
|
||||
self.supportsBacklog = True
|
||||
self.cache = NZBsCache(self)
|
||||
|
||||
self.cache = NZBsCache(self)
|
||||
self.url = 'https://secure.nzbs.org/'
|
||||
|
||||
self.url = 'https://secure.nzbs.org/'
|
||||
def isEnabled(self):
|
||||
return sickbeard.NZBS
|
||||
|
||||
def isEnabled(self):
|
||||
return sickbeard.NZBS
|
||||
def _checkAuth(self):
|
||||
if sickbeard.NZBS_UID in (None, "") or sickbeard.NZBS_HASH in (None, ""):
|
||||
raise exceptions.AuthException("NZBs.org authentication details are empty, check your config")
|
||||
|
||||
def _checkAuth(self):
|
||||
if sickbeard.NZBS_UID in (None, "") or sickbeard.NZBS_HASH in (None, ""):
|
||||
raise exceptions.AuthException("NZBs.org authentication details are empty, check your config")
|
||||
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
|
||||
return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)]
|
||||
|
||||
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
|
||||
return ['^'+x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)]
|
||||
def _get_episode_search_strings(self, ep_obj):
|
||||
return ['^' + x for x in show_name_helpers.makeSceneSearchString(ep_obj)]
|
||||
|
||||
def _get_episode_search_strings(self, ep_obj):
|
||||
return ['^'+x for x in show_name_helpers.makeSceneSearchString(ep_obj)]
|
||||
def _doSearch(self, curString, show=None):
|
||||
|
||||
def _doSearch(self, curString, show=None):
|
||||
curString = curString.replace('.', ' ')
|
||||
|
||||
curString = curString.replace('.', ' ')
|
||||
params = {"action": "search",
|
||||
"q": curString.encode('utf-8'),
|
||||
"dl": 1,
|
||||
"i": sickbeard.NZBS_UID,
|
||||
"h": sickbeard.NZBS_HASH,
|
||||
"age": sickbeard.USENET_RETENTION,
|
||||
"num": 100,
|
||||
"type": 1}
|
||||
|
||||
params = {"action": "search",
|
||||
"q": curString.encode('utf-8'),
|
||||
"dl": 1,
|
||||
"i": sickbeard.NZBS_UID,
|
||||
"h": sickbeard.NZBS_HASH,
|
||||
"age": sickbeard.USENET_RETENTION,
|
||||
"num": 100,
|
||||
"type": 1}
|
||||
searchURL = self.url + "rss.php?" + urllib.urlencode(params)
|
||||
|
||||
searchURL = self.url + "rss.php?" + urllib.urlencode(params)
|
||||
logger.log(u"Search string: " + searchURL, logger.DEBUG)
|
||||
|
||||
logger.log(u"Search string: " + searchURL, logger.DEBUG)
|
||||
data = self.getURL(searchURL)
|
||||
|
||||
data = self.getURL(searchURL)
|
||||
# Pause to avoid 503's
|
||||
time.sleep(5)
|
||||
|
||||
# Pause to avoid 503's
|
||||
time.sleep(5)
|
||||
if data == None:
|
||||
return []
|
||||
|
||||
if data == None:
|
||||
return []
|
||||
try:
|
||||
parsedXML = parseString(data)
|
||||
items = parsedXML.getElementsByTagName('item')
|
||||
except Exception, e:
|
||||
logger.log(u"Error trying to load NZBs.org RSS feed: " + ex(e), logger.ERROR)
|
||||
return []
|
||||
|
||||
try:
|
||||
parsedXML = parseString(data)
|
||||
items = parsedXML.getElementsByTagName('item')
|
||||
except Exception, e:
|
||||
logger.log(u"Error trying to load NZBs.org RSS feed: "+ex(e), logger.ERROR)
|
||||
return []
|
||||
results = []
|
||||
|
||||
results = []
|
||||
for curItem in items:
|
||||
(title, url) = self._get_title_and_url(curItem)
|
||||
|
||||
for curItem in items:
|
||||
(title, url) = self._get_title_and_url(curItem)
|
||||
if not title or not url:
|
||||
logger.log(
|
||||
u"The XML returned from the NZBs.org RSS feed is incomplete, this result is unusable: " + data,
|
||||
logger.ERROR)
|
||||
continue
|
||||
|
||||
if not title or not url:
|
||||
logger.log(u"The XML returned from the NZBs.org RSS feed is incomplete, this result is unusable: "+data, logger.ERROR)
|
||||
continue
|
||||
if "&i=" not in url and "&h=" not in url:
|
||||
raise exceptions.AuthException(
|
||||
"The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
|
||||
|
||||
if "&i=" not in url and "&h=" not in url:
|
||||
raise exceptions.AuthException("The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
|
||||
results.append(curItem)
|
||||
|
||||
results.append(curItem)
|
||||
return results
|
||||
|
||||
return results
|
||||
def findPropers(self, date=None):
|
||||
|
||||
def findPropers(self, date=None):
|
||||
results = []
|
||||
|
||||
results = []
|
||||
for curString in (".PROPER.", ".REPACK."):
|
||||
|
||||
for curString in (".PROPER.", ".REPACK."):
|
||||
for curResult in self._doSearch(curString):
|
||||
|
||||
for curResult in self._doSearch(curString):
|
||||
(title, url) = self._get_title_and_url(curResult)
|
||||
|
||||
(title, url) = self._get_title_and_url(curResult)
|
||||
pubDate_node = curResult.getElementsByTagName('pubDate')[0]
|
||||
pubDate = helpers.get_xml_text(pubDate_node)
|
||||
|
||||
pubDate_node = curResult.getElementsByTagName('pubDate')[0]
|
||||
pubDate = helpers.get_xml_text(pubDate_node)
|
||||
match = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', pubDate)
|
||||
if not match:
|
||||
continue
|
||||
|
||||
match = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', pubDate)
|
||||
if not match:
|
||||
continue
|
||||
resultDate = datetime.datetime.strptime(match.group(1), "%a, %d %b %Y %H:%M:%S")
|
||||
|
||||
resultDate = datetime.datetime.strptime(match.group(1), "%a, %d %b %Y %H:%M:%S")
|
||||
if date == None or resultDate > date:
|
||||
results.append(classes.Proper(title, url, resultDate))
|
||||
|
||||
if date == None or resultDate > date:
|
||||
results.append(classes.Proper(title, url, resultDate))
|
||||
return results
|
||||
|
||||
return results
|
||||
|
||||
class NZBsCache(tvcache.TVCache):
|
||||
def __init__(self, provider):
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
||||
def __init__(self, provider):
|
||||
# only poll NZBs.org every 15 minutes max
|
||||
self.minTime = 15
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
def _getRSSData(self):
|
||||
url = self.provider.url + 'rss.php?'
|
||||
urlArgs = {'type': 1,
|
||||
'dl': 1,
|
||||
'num': 100,
|
||||
'i': sickbeard.NZBS_UID,
|
||||
'h': sickbeard.NZBS_HASH,
|
||||
'age': sickbeard.USENET_RETENTION}
|
||||
|
||||
# only poll NZBs.org every 15 minutes max
|
||||
self.minTime = 15
|
||||
url += urllib.urlencode(urlArgs)
|
||||
|
||||
def _getRSSData(self):
|
||||
url = self.provider.url + 'rss.php?'
|
||||
urlArgs = {'type': 1,
|
||||
'dl': 1,
|
||||
'num': 100,
|
||||
'i': sickbeard.NZBS_UID,
|
||||
'h': sickbeard.NZBS_HASH,
|
||||
'age': sickbeard.USENET_RETENTION}
|
||||
logger.log(u"NZBs cache update URL: " + url, logger.DEBUG)
|
||||
|
||||
url += urllib.urlencode(urlArgs)
|
||||
data = self.provider.getURL(url)
|
||||
|
||||
logger.log(u"NZBs cache update URL: "+ url, logger.DEBUG)
|
||||
return data
|
||||
|
||||
data = self.provider.getURL(url)
|
||||
def _checkItemAuth(self, title, url):
|
||||
if "&i=" not in url and "&h=" not in url:
|
||||
raise exceptions.AuthException(
|
||||
"The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
|
||||
|
||||
return data
|
||||
|
||||
def _checkItemAuth(self, title, url):
|
||||
if "&i=" not in url and "&h=" not in url:
|
||||
raise exceptions.AuthException("The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
|
||||
|
||||
provider = NZBsProvider()
|
|
@ -30,7 +30,6 @@ from sickbeard import tvcache, show_name_helpers
|
|||
|
||||
|
||||
class NZBsRUSProvider(generic.NZBProvider):
|
||||
|
||||
def __init__(self):
|
||||
generic.NZBProvider.__init__(self, "NZBs'R'US")
|
||||
self.cache = NZBsRUSCache(self)
|
||||
|
@ -55,12 +54,12 @@ class NZBsRUSProvider(generic.NZBProvider):
|
|||
'key': sickbeard.NZBSRUS_HASH,
|
||||
'xml': 1,
|
||||
'age': sickbeard.USENET_RETENTION,
|
||||
'lang0': 1, # English only from CouchPotato
|
||||
'lang0': 1, # English only from CouchPotato
|
||||
'lang1': 1,
|
||||
'lang3': 1,
|
||||
'c91': 1, # TV:HD
|
||||
'c104': 1, # TV:SD-x264
|
||||
'c75': 1, # TV:XviD
|
||||
'c91': 1, # TV:HD
|
||||
'c104': 1, # TV:SD-x264
|
||||
'c75': 1, # TV:XviD
|
||||
'searchtext': search}
|
||||
|
||||
if not params['age']:
|
||||
|
@ -93,12 +92,11 @@ class NZBsRUSProvider(generic.NZBProvider):
|
|||
nzbID = element.find('id').text
|
||||
key = element.find('key').text
|
||||
url = self.url + 'nzbdownload_rss.php' + '/' + \
|
||||
nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/'
|
||||
nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/'
|
||||
return (title, url)
|
||||
|
||||
|
||||
class NZBsRUSCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
# only poll NZBs'R'US every 15 minutes max
|
||||
|
@ -119,4 +117,5 @@ class NZBsRUSCache(tvcache.TVCache):
|
|||
def _checkAuth(self, data):
|
||||
return data != 'Invalid Link'
|
||||
|
||||
|
||||
provider = NZBsRUSProvider()
|
||||
|
|
|
@ -40,7 +40,6 @@ except ImportError:
|
|||
|
||||
|
||||
class OmgwtfnzbsProvider(generic.NZBProvider):
|
||||
|
||||
def __init__(self):
|
||||
generic.NZBProvider.__init__(self, "omgwtfnzbs")
|
||||
self.cache = OmgwtfnzbsCache(self)
|
||||
|
@ -52,7 +51,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
|
|||
|
||||
def _checkAuth(self):
|
||||
|
||||
if not sickbeard.OMGWTFNZBS_USERNAME or not sickbeard.OMGWTFNZBS_APIKEY:
|
||||
if not sickbeard.OMGWTFNZBS_USERNAME or not sickbeard.OMGWTFNZBS_APIKEY:
|
||||
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
|
||||
|
||||
return True
|
||||
|
@ -73,8 +72,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
|
|||
description_text = parsedJSON.get('notice')
|
||||
|
||||
if 'information is incorrect' in parsedJSON.get('notice'):
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text), logger.DEBUG)
|
||||
raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.")
|
||||
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text),
|
||||
logger.DEBUG)
|
||||
raise AuthException(
|
||||
"Your authentication credentials for " + self.name + " are incorrect, check your config.")
|
||||
|
||||
elif '0 results matched your terms' in parsedJSON.get('notice'):
|
||||
return True
|
||||
|
@ -156,7 +157,6 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
|
|||
|
||||
|
||||
class OmgwtfnzbsCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
self.minTime = 20
|
||||
|
@ -180,6 +180,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
|
|||
return data
|
||||
|
||||
def _checkAuth(self, parsedXML):
|
||||
return self.provider._checkAuthFromData(parsedXML)
|
||||
return self.provider._checkAuthFromData(parsedXML)
|
||||
|
||||
|
||||
provider = OmgwtfnzbsProvider()
|
||||
|
|
|
@ -43,8 +43,8 @@ from lib import requests
|
|||
from bs4 import BeautifulSoup
|
||||
from lib.unidecode import unidecode
|
||||
|
||||
class PublicHDProvider(generic.TorrentProvider):
|
||||
|
||||
class PublicHDProvider(generic.TorrentProvider):
|
||||
def __init__(self):
|
||||
|
||||
generic.TorrentProvider.__init__(self, "PublicHD")
|
||||
|
@ -81,10 +81,10 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
if searchSeason:
|
||||
search_string = {'Season': [], 'Episode': []}
|
||||
for show_name in set(allPossibleShowNames(show)):
|
||||
ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX -SXXE
|
||||
ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX -SXXE
|
||||
search_string['Season'].append(ep_string)
|
||||
|
||||
ep_string = show_name+' Season ' + str(season) #2) ShowName Season X
|
||||
ep_string = show_name + ' Season ' + str(season) #2) ShowName Season X
|
||||
search_string['Season'].append(ep_string)
|
||||
|
||||
for ep_obj in wantedEp:
|
||||
|
@ -106,17 +106,18 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
|
||||
if ep_obj.show.air_by_date:
|
||||
for show_name in set(allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = sanitizeSceneName(show_name) +' '+ \
|
||||
str(ep_obj.airdate) +'|'+\
|
||||
ep_string = sanitizeSceneName(show_name) + ' ' + \
|
||||
str(ep_obj.airdate) + '|' + \
|
||||
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
|
||||
search_string['Episode'].append(ep_string)
|
||||
else:
|
||||
for show_name in set(allPossibleShowNames(ep_obj.show)):
|
||||
ep_string = sanitizeSceneName(show_name) + ' ' + \
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
|
||||
|
||||
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
|
||||
'episodenumber': ep_obj.episode}
|
||||
|
||||
for x in add_string.split('|'):
|
||||
to_search = re.sub('\s+', ' ', ep_string + ' %s' %x)
|
||||
to_search = re.sub('\s+', ' ', ep_string + ' %s' % x)
|
||||
search_string['Episode'].append(to_search)
|
||||
|
||||
return [search_string]
|
||||
|
@ -130,10 +131,12 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
for search_string in search_params[mode]:
|
||||
|
||||
if mode == 'RSS':
|
||||
searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' %(';'.join(self.categories[mode]))
|
||||
logger.log(u"PublicHD cache update URL: "+ searchURL, logger.DEBUG)
|
||||
searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' % (
|
||||
';'.join(self.categories[mode]))
|
||||
logger.log(u"PublicHD cache update URL: " + searchURL, logger.DEBUG)
|
||||
else:
|
||||
searchURL = self.searchurl %(urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode]))
|
||||
searchURL = self.searchurl % (
|
||||
urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode]))
|
||||
logger.log(u"Search string: " + searchURL, logger.DEBUG)
|
||||
|
||||
html = self.getURL(searchURL)
|
||||
|
@ -143,19 +146,20 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
try:
|
||||
soup = BeautifulSoup(html, features=["html5lib", "permissive"])
|
||||
|
||||
torrent_table = soup.find('table', attrs = {'id' : 'torrbg'})
|
||||
torrent_table = soup.find('table', attrs={'id': 'torrbg'})
|
||||
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
|
||||
|
||||
#Continue only if one Release is found
|
||||
if len(torrent_rows)<2:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
|
||||
if len(torrent_rows) < 2:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
for tr in torrent_rows[1:]:
|
||||
|
||||
try:
|
||||
link = self.url + tr.find(href=re.compile('page=torrent-details'))['href']
|
||||
title = tr.find(lambda x: x.has_attr('title')).text.replace('_','.')
|
||||
title = tr.find(lambda x: x.has_attr('title')).text.replace('_', '.')
|
||||
url = tr.find(href=re.compile('magnet+'))['href']
|
||||
seeders = int(tr.find_all('td', {'class': 'header'})[4].text)
|
||||
leechers = int(tr.find_all('td', {'class': 'header'})[5].text)
|
||||
|
@ -173,7 +177,8 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
items[mode].append(item)
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
|
||||
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
|
||||
logger.ERROR)
|
||||
|
||||
#For each search mode sort all the items by seeders
|
||||
items[mode].sort(key=lambda tup: tup[3], reverse=True)
|
||||
|
@ -187,7 +192,7 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
title, url, id, seeders, leechers = item
|
||||
|
||||
if url:
|
||||
url = url.replace('&','&')
|
||||
url = url.replace('&', '&')
|
||||
|
||||
return (title, url)
|
||||
|
||||
|
@ -196,16 +201,17 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
r = requests.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if r.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return r.content
|
||||
|
@ -214,35 +220,36 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
"""
|
||||
Save the result to disk.
|
||||
"""
|
||||
|
||||
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
|
||||
|
||||
|
||||
if not torrent_hash:
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
try:
|
||||
r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent')
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
if not r.status_code == 200:
|
||||
return False
|
||||
|
||||
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
|
||||
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
|
||||
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
magnetFileContent = r.content
|
||||
|
||||
try:
|
||||
try:
|
||||
with open(magnetFileName, 'wb') as fileOut:
|
||||
fileOut.write(magnetFileContent)
|
||||
|
||||
|
||||
helpers.chmodAsParent(magnetFileName)
|
||||
|
||||
|
||||
except EnvironmentError, e:
|
||||
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
|
||||
logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
|
||||
return True
|
||||
|
||||
|
@ -250,12 +257,13 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
|
||||
results = []
|
||||
|
||||
sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
sqlResults = db.DBConnection().select(
|
||||
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
|
||||
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
|
||||
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
|
||||
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
|
||||
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
|
||||
)
|
||||
if not sqlResults:
|
||||
return []
|
||||
|
||||
|
@ -272,7 +280,6 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
|
||||
|
||||
class PublicHDCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -318,4 +325,5 @@ class PublicHDCache(tvcache.TVCache):
|
|||
|
||||
return self._addCacheEntry(title, url)
|
||||
|
||||
|
||||
provider = PublicHDProvider()
|
||||
|
|
|
@ -35,8 +35,8 @@ from lib import requests
|
|||
from bs4 import BeautifulSoup
|
||||
from lib.bencode import bdecode
|
||||
|
||||
class TorrentRssProvider(generic.TorrentProvider):
|
||||
|
||||
class TorrentRssProvider(generic.TorrentProvider):
|
||||
def __init__(self, name, url):
|
||||
|
||||
generic.TorrentProvider.__init__(self, name)
|
||||
|
@ -50,105 +50,106 @@ class TorrentRssProvider(generic.TorrentProvider):
|
|||
return self.name + '|' + self.url + '|' + str(int(self.enabled))
|
||||
|
||||
def imageName(self):
|
||||
if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
|
||||
return self.getID() + '.png'
|
||||
if ek.ek(os.path.isfile,
|
||||
ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
|
||||
return self.getID() + '.png'
|
||||
return 'torrentrss.png'
|
||||
|
||||
def isEnabled(self):
|
||||
return self.enabled
|
||||
|
||||
def _get_title_and_url(self, item):
|
||||
|
||||
|
||||
title, url = None, None
|
||||
|
||||
self.cache._remove_namespace(item)
|
||||
|
||||
title = helpers.get_xml_text(item.find('title'))
|
||||
|
||||
|
||||
attempt_list = [lambda: helpers.get_xml_text(item.find('magnetURI')),
|
||||
|
||||
|
||||
lambda: item.find('enclosure').get('url'),
|
||||
|
||||
|
||||
lambda: helpers.get_xml_text(item.find('link'))]
|
||||
|
||||
|
||||
for cur_attempt in attempt_list:
|
||||
try:
|
||||
url = cur_attempt()
|
||||
except:
|
||||
continue
|
||||
|
||||
|
||||
if title and url:
|
||||
return (title, url)
|
||||
|
||||
|
||||
return (title, url)
|
||||
|
||||
def validateRSS(self):
|
||||
|
||||
try:
|
||||
|
||||
try:
|
||||
|
||||
data = self.cache._getRSSData()
|
||||
|
||||
|
||||
if not data:
|
||||
return (False, 'No data returned from url: ' + self.url)
|
||||
|
||||
|
||||
parsedXML = helpers.parse_xml(data)
|
||||
|
||||
|
||||
if not parsedXML:
|
||||
return (False, 'Unable to parse RSS, is it a real RSS? ')
|
||||
|
||||
|
||||
items = parsedXML.findall('.//item')
|
||||
|
||||
|
||||
if not items:
|
||||
return (False, 'No items found in the RSS feed ' + self.url)
|
||||
|
||||
|
||||
(title, url) = self._get_title_and_url(items[0])
|
||||
|
||||
|
||||
if not title:
|
||||
return (False, 'Unable to get title from first item')
|
||||
|
||||
|
||||
if not url:
|
||||
return (False, 'Unable to get torrent url from first item')
|
||||
|
||||
|
||||
if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url):
|
||||
return (True, 'RSS feed Parsed correctly')
|
||||
else:
|
||||
|
||||
|
||||
torrent_file = self.getURL(url)
|
||||
try:
|
||||
try:
|
||||
bdecode(torrent_file)
|
||||
except Exception, e:
|
||||
self.dumpHTML(torrent_file)
|
||||
return (False, 'Torrent link is not a valid torrent file: ' + ex(e))
|
||||
|
||||
|
||||
return (True, 'RSS feed Parsed correctly')
|
||||
|
||||
except Exception, e:
|
||||
return (False, 'Error when trying to load RSS: ' + ex(e))
|
||||
|
||||
def getURL(self, url, headers=None):
|
||||
|
||||
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
|
||||
try:
|
||||
url = urljoin(url, urlparse(url).path.replace('//','/'))
|
||||
url = urljoin(url, urlparse(url).path.replace('//', '/'))
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR)
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def dumpHTML(self, data):
|
||||
|
||||
|
||||
dumpName = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html')
|
||||
|
||||
try:
|
||||
try:
|
||||
fileOut = open(dumpName, 'wb')
|
||||
fileOut.write(data)
|
||||
fileOut.close()
|
||||
|
@ -157,10 +158,10 @@ class TorrentRssProvider(generic.TorrentProvider):
|
|||
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE)
|
||||
return True
|
||||
return True
|
||||
|
||||
|
||||
class TorrentRssCache(tvcache.TVCache):
|
||||
|
||||
def __init__(self, provider):
|
||||
|
||||
tvcache.TVCache.__init__(self, provider)
|
||||
|
@ -173,12 +174,12 @@ class TorrentRssCache(tvcache.TVCache):
|
|||
return data
|
||||
|
||||
def _parseItem(self, item):
|
||||
|
||||
|
||||
(title, url) = self.provider._get_title_and_url(item)
|
||||
if not title or not url:
|
||||
logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR)
|
||||
return None
|
||||
|
||||
|
||||
logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
|
||||
return self._addCacheEntry(title, url)
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue