Merge pull request #464 from JackDandy/feature/ChangeProvRefactorAndPEP8

Change refactor, PEP8, code convention cleanse for nzb/torrent code…
Commit 40bcc38c5b by JackDandy, 2015-07-15 19:32:04 +01:00
51 changed files with 1083 additions and 1191 deletions
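At its core, the diff below renames the provider helper methods from camelCase to snake_case (getID becomes get_id, isEnabled becomes is_enabled, imageName becomes image_name, makeID becomes make_id), switches string literals to single quotes, and tidies layout per PEP8. A minimal sketch of the resulting naming convention, using a hypothetical provider class rather than SickGear's real GenericProvider (the method bodies here are illustrative assumptions only):

    # hypothetical illustration of the snake_case provider API used throughout this commit
    import re

    class ExampleProvider(object):
        def __init__(self, name):
            self.name = name
            self.enabled = False

        def get_id(self):
            # assumed behaviour: derive a config-safe id from the provider name
            return re.sub(r'[^\w\d_]', '_', self.name.strip().lower())

        def is_enabled(self):
            return bool(self.enabled)

        def image_name(self):
            # assumed behaviour: icon file named after the provider id
            return self.get_id() + '.png'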

.gitignore

@@ -1,5 +1,5 @@
+# SB User Related #
 ######################
-# SB User Related #
 cache/*
 cache.db*
 config.ini*
@@ -11,18 +11,18 @@ server.crt
 server.key
 restore/
+# SB Test Related #
 ######################
-# SB Test Related #
 tests/Logs/*
 tests/sickbeard.*
 tests/cache.db
+# Compiled source #
 ######################
-# Compiled source #
 *.py[co]
+# IDE specific #
 ######################
-# IDE specific #
 *.bak
 *.tmp
 *.wpr
@@ -35,8 +35,8 @@ tests/cache.db
 Session.vim
 .ropeproject/*
+# OS generated files #
 ######################
-# OS generated files #
 .Spotlight-V100
 .Trashes
 .DS_Store


@@ -1,7 +1,7 @@
 [SickBeard]
 host=localhost
 port=8081
 username=
 password=
 web_root=
 ssl=0


@@ -1,13 +1,17 @@
 from distutils.core import setup
-import py2exe, sys, shutil
+import sys
+import shutil
+try:
+    import py2exe
+except:
+    pass
 
 sys.argv.append('py2exe')
 
-setup(
-    options = {'py2exe': {'bundle_files': 1}},
-    # windows = [{'console': "sabToSickbeard.py"}],
-    zipfile = None,
-    console = ['sabToSickbeard.py'],
-)
+setup(options={'py2exe': {'bundle_files': 1}},
+      # windows = [{'console': "sabToSickbeard.py"}],
+      zipfile=None,
+      console=['sabToSickbeard.py']
+      )
 
 shutil.copy('dist/sabToSickbeard.exe', '.')
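Pulled out of the diff markers, the refactored build helper reads as a whole roughly as follows (a straight consolidation of the added lines above; the guarded import only lets the file be parsed where py2exe is unavailable, since py2exe is needed solely at build time):

    # consolidated view of the new sabToSickbeard build script from the diff above
    from distutils.core import setup
    import sys
    import shutil
    try:
        import py2exe  # required only when actually building the Windows exe
    except:
        pass

    sys.argv.append('py2exe')

    setup(options={'py2exe': {'bundle_files': 1}},
          # windows = [{'console': "sabToSickbeard.py"}],
          zipfile=None,
          console=['sabToSickbeard.py']
          )

    shutil.copy('dist/sabToSickbeard.exe', '.')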

(binary image file changed; 886 B before and after)

@@ -3,8 +3,8 @@
 #from sickbeard.providers import thepiratebay
 #from sickbeard.helpers import anon_url, starify
 ##
-#set global $title="Config - Providers"
-#set global $header="Search Providers"
+#set global $title = 'Config - Providers'
+#set global $header = 'Search Providers'
 #set global $sbPath = '../..'
 #set global $topmenu = 'config'
 ##
@@ -39,7 +39,7 @@
 #for $curNewznabProvider in $sickbeard.newznabProviderList:
-\$(this).addProvider('$curNewznabProvider.getID()', '$curNewznabProvider.name', '$curNewznabProvider.url', '<%= starify(curNewznabProvider.key) %>', '$curNewznabProvider.cat_ids', $int($curNewznabProvider.default), show_nzb_providers);
+\$(this).addProvider('$curNewznabProvider.get_id()', '$curNewznabProvider.name', '$curNewznabProvider.url', '<%= starify(curNewznabProvider.key) %>', '$curNewznabProvider.cat_ids', $int($curNewznabProvider.default), show_nzb_providers);
 #end for
@@ -49,7 +49,7 @@
 #for $curTorrentRssProvider in $sickbeard.torrentRssProviderList:
-\$(this).addTorrentRssProvider('$curTorrentRssProvider.getID()', '$curTorrentRssProvider.name', '$curTorrentRssProvider.url', '<%= starify(curTorrentRssProvider.cookies) %>');
+\$(this).addTorrentRssProvider('$curTorrentRssProvider.get_id()', '$curTorrentRssProvider.name', '$curTorrentRssProvider.url', '<%= starify(curTorrentRssProvider.cookies) %>');
 #end for
@@ -90,7 +90,7 @@
 <p>At least one provider is required but two are recommended.</p>
 #if $methods_notused
-<blockquote style="margin: 20px 0"><%= '/'.join(x for x in methods_notused) %> providers can be enabled in <a href="$sbRoot/config/search/">Search Settings</a></blockquote>
+<blockquote style="margin:20px 0"><%= '/'.join(x for x in methods_notused) %> providers can be enabled in <a href="$sbRoot/config/search/">Search Settings</a></blockquote>
 #else
 <br/>
 #end if
@@ -104,11 +104,11 @@
 #elif $curProvider.providerType == $GenericProvider.TORRENT and not $sickbeard.USE_TORRENTS
 #continue
 #end if
-#set $curName = $curProvider.getID()
+#set $curName = $curProvider.get_id()
 <li class="ui-state-default" id="$curName">
-<input type="checkbox" id="enable_$curName" class="provider_enabler" <%= html_checked if curProvider.isEnabled() else '' %>/>
-<a href="<%= anon_url(curProvider.url) %>" class="imgLink" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;"><img src="$sbRoot/images/providers/$curProvider.imageName()" alt="$curProvider.name" title="$curProvider.name" width="16" height="16" style="vertical-align:middle;"/></a>
-<span style="vertical-align:middle;">$curProvider.name</span>
+<input type="checkbox" id="enable_$curName" class="provider_enabler" <%= html_checked if curProvider.is_enabled() else '' %>/>
+<a href="<%= anon_url(curProvider.url) %>" class="imgLink" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;"><img src="$sbRoot/images/providers/$curProvider.image_name()" alt="$curProvider.name" title="$curProvider.name" width="16" height="16" style="vertical-align:middle;"/></a>
+<span style="vertical-align:middle">$curProvider.name</span>
 <%= '*' if not curProvider.supportsBacklog else '' %>
 <span class="ui-icon ui-icon-arrowthick-2-n-s pull-right" style="margin-top:3px"></span>
 </li>
@@ -124,7 +124,7 @@
 ##<h4 class="note">!</h4><p class="note">Provider is <b>NOT WORKING</b></p>
 </div>
-<input type="hidden" name="provider_order" id="provider_order" value="<%=" ".join([x.getID()+':'+str(int(x.isEnabled())) for x in sickbeard.providers.sortedProviderList()])%>"/>
+<input type="hidden" name="provider_order" id="provider_order" value="<%=' '.join([x.get_id()+':'+str(int(x.is_enabled())) for x in sickbeard.providers.sortedProviderList()])%>"/>
 <div style="width: 300px; float: right">
 <div style="margin: 0px auto; width: 101px">
 <input type="submit" class="btn config_submitter" value="Save Changes" />
@@ -156,7 +156,7 @@
 #elif $curProvider.providerType == $GenericProvider.TORRENT and not $sickbeard.USE_TORRENTS
 #continue
 #end if
-#if $curProvider.isEnabled()
+#if $curProvider.is_enabled()
 $provider_config_list_enabled.append($curProvider)
 #else
 $provider_config_list.append($curProvider)
@@ -168,14 +168,14 @@
 #if $provider_config_list_enabled
 <optgroup label="Enabled...">
 #for $cur_provider in $provider_config_list_enabled:
-<option value="$cur_provider.getID()">$cur_provider.name</option>
+<option value="$cur_provider.get_id()">$cur_provider.name</option>
 #end for
 </optgroup>
 #end if
 #if $provider_config_list
 <optgroup label="Not Enabled...">
 #for $cur_provider in $provider_config_list
-<option value="$cur_provider.getID()">$cur_provider.name</option>
+<option value="$cur_provider.get_id()">$cur_provider.name</option>
 #end for
 </optgroup>
 #end if
@@ -187,76 +187,71 @@
 </label>
 </div>
 <!-- start div for editing providers //-->
 #for $curNewznabProvider in [$curProvider for $curProvider in $sickbeard.newznabProviderList]
-<div class="providerDiv" id="${curNewznabProvider.getID()}Div">
+<div class="providerDiv" id="${curNewznabProvider.get_id()}Div">
 #if $curNewznabProvider.default and $curNewznabProvider.needs_auth
 <div class="field-pair">
-<label for="${curNewznabProvider.getID()}_url">
+<label for="${curNewznabProvider.get_id()}_url">
 <span class="component-title">URL</span>
 <span class="component-desc">
-<input type="text" id="${curNewznabProvider.getID()}_url" value="$curNewznabProvider.url" class="form-control input-sm input350" disabled/>
+<input type="text" id="${curNewznabProvider.get_id()}_url" value="$curNewznabProvider.url" class="form-control input-sm input350" disabled/>
 </span>
 </label>
 </div>
 <div class="field-pair">
-<label for="${curNewznabProvider.getID()}_hash">
+<label for="${curNewznabProvider.get_id()}_hash">
 <span class="component-title">API key</span>
 <span class="component-desc">
-<input type="text" id="${curNewznabProvider.getID()}_hash" value="<%= starify(curNewznabProvider.key) %>" newznab_name="${curNewznabProvider.getID()}_hash" class="newznab_key form-control input-sm input350" />
+<input type="text" id="${curNewznabProvider.get_id()}_hash" value="<%= starify(curNewznabProvider.key) %>" newznab_name="${curNewznabProvider.get_id()}_hash" class="newznab_key form-control input-sm input350" />
 <div class="clear-left"><p>get API key from provider website</p></div>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curNewznabProvider, 'enable_recentsearch'):
+#if $hasattr($curNewznabProvider, 'enable_recentsearch') and $curNewznabProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curNewznabProvider.getID()}_enable_recentsearch">
+<label for="${curNewznabProvider.get_id()}_enable_recentsearch">
 <span class="component-title">Enable recent searches</span>
 <span class="component-desc">
-<input type="checkbox" name="${curNewznabProvider.getID()}_enable_recentsearch" id="${curNewznabProvider.getID()}_enable_recentsearch" <%= html_checked if curNewznabProvider.enable_recentsearch else '' %>/>
+<input type="checkbox" name="${curNewznabProvider.get_id()}_enable_recentsearch" id="${curNewznabProvider.get_id()}_enable_recentsearch" <%= html_checked if curNewznabProvider.enable_recentsearch else '' %>/>
 <p>perform recent searches at provider</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curNewznabProvider, 'enable_backlog'):
+#if $hasattr($curNewznabProvider, 'enable_backlog') and $curNewznabProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curNewznabProvider.getID()}_enable_backlog">
+<label for="${curNewznabProvider.get_id()}_enable_backlog">
 <span class="component-title">Enable backlog searches</span>
 <span class="component-desc">
-<input type="checkbox" name="${curNewznabProvider.getID()}_enable_backlog" id="${curNewznabProvider.getID()}_enable_backlog" <%= html_checked if curNewznabProvider.enable_backlog else '' %>/>
+<input type="checkbox" name="${curNewznabProvider.get_id()}_enable_backlog" id="${curNewznabProvider.get_id()}_enable_backlog" <%= html_checked if curNewznabProvider.enable_backlog else '' %>/>
 <p>perform backlog searches at provider</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curNewznabProvider, 'search_mode'):
+#if $hasattr($curNewznabProvider, 'search_mode') and $curNewznabProvider.supportsBacklog:
 <div class="field-pair">
 <span class="component-title">Season search mode</span>
 <span class="component-desc">
 <label class="space-right">
-<input type="radio" name="${curNewznabProvider.getID()}_search_mode" id="${curNewznabProvider.getID()}_search_mode_sponly" value="sponly" <%= html_checked if 'sponly' == curNewznabProvider.search_mode else '' %>/>season packs only
+<input type="radio" name="${curNewznabProvider.get_id()}_search_mode" id="${curNewznabProvider.get_id()}_search_mode_sponly" value="sponly" <%= html_checked if 'sponly' == curNewznabProvider.search_mode else '' %>/>season packs only
 </label>
 <label>
-<input type="radio" name="${curNewznabProvider.getID()}_search_mode" id="${curNewznabProvider.getID()}_search_mode_eponly" value="eponly" <%= html_checked if 'eponly' == curNewznabProvider.search_mode else '' %>/>episodes only
+<input type="radio" name="${curNewznabProvider.get_id()}_search_mode" id="${curNewznabProvider.get_id()}_search_mode_eponly" value="eponly" <%= html_checked if 'eponly' == curNewznabProvider.search_mode else '' %>/>episodes only
 </label>
 <p>when searching for complete seasons, search for packs or collect single episodes</p>
 </span>
 </div>
 #end if
-#if $hasattr($curNewznabProvider, 'search_fallback'):
+#if $hasattr($curNewznabProvider, 'search_fallback') and $curNewznabProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curNewznabProvider.getID()}_search_fallback">
+<label for="${curNewznabProvider.get_id()}_search_fallback">
 <span class="component-title">Season search fallback</span>
 <span class="component-desc">
-<input type="checkbox" name="${curNewznabProvider.getID()}_search_fallback" id="${curNewznabProvider.getID()}_search_fallback" <%= html_checked if curNewznabProvider.search_fallback else '' %>/>
+<input type="checkbox" name="${curNewznabProvider.get_id()}_search_fallback" id="${curNewznabProvider.get_id()}_search_fallback" <%= html_checked if curNewznabProvider.search_fallback else '' %>/>
 <p>run the alternate season search mode when a complete season is not found</p>
 </span>
 </label>
@@ -264,218 +259,205 @@
 #end if
 </div>
 #end for
+##
+##
 #for $curNzbProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.NZB and $curProvider not in $sickbeard.newznabProviderList]:
-<div class="providerDiv" id="${curNzbProvider.getID()}Div">
+<div class="providerDiv" id="${curNzbProvider.get_id()}Div">
 #if $hasattr($curNzbProvider, 'username'):
 <div class="field-pair">
-<label for="${curNzbProvider.getID()}_username">
+<label for="${curNzbProvider.get_id()}_username">
 <span class="component-title">Username</span>
 <span class="component-desc">
-<input type="text" name="${curNzbProvider.getID()}_username" value="$curNzbProvider.username" class="form-control input-sm input350" />
+<input type="text" name="${curNzbProvider.get_id()}_username" value="$curNzbProvider.username" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curNzbProvider, 'api_key'):
 <div class="field-pair">
-<label for="${curNzbProvider.getID()}_api_key">
+<label for="${curNzbProvider.get_id()}_api_key">
 <span class="component-title">API key</span>
 <span class="component-desc">
-<input type="text" name="${curNzbProvider.getID()}_api_key" value="<%= starify(curNzbProvider.api_key) %>" class="form-control input-sm input350" />
+#set $field_name = curNzbProvider.get_id() + '_api_key'
+<input type="text" name="$field_name" value="<%= starify(curNzbProvider.api_key) %>" class="form-control input-sm input350" />
+#if callable(getattr(curNzbProvider, 'ui_string'))
+<div class="clear-left"><p>${curNzbProvider.ui_string($field_name)}</p></div>
+#end if
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curNzbProvider, 'enable_recentsearch'):
+#if $hasattr($curNzbProvider, 'enable_recentsearch') and $curNzbProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curNzbProvider.getID()}_enable_recentsearch">
+<label for="${curNzbProvider.get_id()}_enable_recentsearch">
 <span class="component-title">Enable recent searches</span>
 <span class="component-desc">
-<input type="checkbox" name="${curNzbProvider.getID()}_enable_recentsearch" id="${curNzbProvider.getID()}_enable_recentsearch" <%= html_checked if curNzbProvider.enable_recentsearch else '' %>/>
+<input type="checkbox" name="${curNzbProvider.get_id()}_enable_recentsearch" id="${curNzbProvider.get_id()}_enable_recentsearch" <%= html_checked if curNzbProvider.enable_recentsearch else '' %>/>
 <p>enable provider to perform recent searches.</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curNzbProvider, 'enable_backlog'):
+#if $hasattr($curNzbProvider, 'enable_backlog') and $curNzbProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curNzbProvider.getID()}_enable_backlog">
+<label for="${curNzbProvider.get_id()}_enable_backlog">
 <span class="component-title">Enable backlog searches</span>
 <span class="component-desc">
-<input type="checkbox" name="${curNzbProvider.getID()}_enable_backlog" id="${curNzbProvider.getID()}_enable_backlog" <%= html_checked if curNzbProvider.enable_backlog else '' %>/>
+<input type="checkbox" name="${curNzbProvider.get_id()}_enable_backlog" id="${curNzbProvider.get_id()}_enable_backlog" <%= html_checked if curNzbProvider.enable_backlog else '' %>/>
 <p>enable provider to perform backlog searches.</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curNzbProvider, 'search_fallback'):
+#if $hasattr($curNzbProvider, 'search_mode') and $curNzbProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curNzbProvider.getID()}_search_fallback">
+<span class="component-title">Season search mode</span>
+<span class="component-desc">
+<label class="space-right">
+<input type="radio" name="${curNzbProvider.get_id()}_search_mode" id="${curNzbProvider.get_id()}_search_mode_sponly" value="sponly" <%= html_checked if 'sponly' == curNzbProvider.search_mode else '' %>/>season packs only
+</label>
+<label>
+<input type="radio" name="${curNzbProvider.get_id()}_search_mode" id="${curNzbProvider.get_id()}_search_mode_eponly" value="eponly" <%= html_checked if 'eponly' == curNzbProvider.search_mode else '' %>/>episodes only
+</label>
+<p>when searching for complete seasons, search for packs or collect single episodes</p>
+</span>
+</div>
+#end if
+#if $hasattr($curNzbProvider, 'search_fallback') and $curNzbProvider.supportsBacklog:
+<div class="field-pair">
+<label for="${curNzbProvider.get_id()}_search_fallback">
 <span class="component-title">Season search fallback</span>
 <span class="component-desc">
-<input type="checkbox" name="${curNzbProvider.getID()}_search_fallback" id="${curNzbProvider.getID()}_search_fallback" <%= html_checked if curNzbProvider.search_fallback else '' %>/>
-<p>when searching for a complete season depending on search mode you may return no results, this helps by restarting the search using the opposite search mode.</p>
+<input type="checkbox" name="${curNzbProvider.get_id()}_search_fallback" id="${curNzbProvider.get_id()}_search_fallback" <%= html_checked if curNzbProvider.search_fallback else '' %>/>
+<p>run the alternate season search mode when a complete season is not found</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curNzbProvider, 'search_mode'):
+#if not $curNzbProvider.supportsBacklog:
 <div class="field-pair">
-<label>
-<span class="component-title">Season search mode</span>
-<span class="component-desc">
-<p>when searching for complete seasons you can choose to have it look for season packs only, or choose to have it build a complete season from just single episodes.</p>
-</span>
-</label>
-<label>
-<span class="component-title"></span>
-<span class="component-desc">
-<input type="radio" name="${curNzbProvider.getID()}_search_mode" id="${curNzbProvider.getID()}_search_mode_sponly" value="sponly" <%= html_checked if 'sponly' == curNzbProvider.search_mode else '' %>/>season packs only.
-</span>
-</label>
-<label>
-<span class="component-title"></span>
-<span class="component-desc">
-<input type="radio" name="${curNzbProvider.getID()}_search_mode" id="${curNzbProvider.getID()}_search_mode_eponly" value="eponly" <%= html_checked if 'eponly' == curNzbProvider.search_mode else '' %>/>episodes only.
-</span>
-</label>
+<span class="component-desc">The latest releases are the focus of this provider, no backlog searching</span>
 </div>
 #end if
 </div>
 #end for
+##
+##
 #for $curTorrentProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.TORRENT]:
-<div class="providerDiv" id="${curTorrentProvider.getID()}Div">
+<div class="providerDiv" id="${curTorrentProvider.get_id()}Div">
 #if $hasattr($curTorrentProvider, 'api_key'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_api_key">
+<label for="${curTorrentProvider.get_id()}_api_key">
 <span class="component-title">Api key:</span>
 <span class="component-desc">
-<input type="text" name="${curTorrentProvider.getID()}_api_key" id="${curTorrentProvider.getID()}_api_key" value="<%= starify(curTorrentProvider.api_key) %>" class="form-control input-sm input350" />
+<input type="text" name="${curTorrentProvider.get_id()}_api_key" id="${curTorrentProvider.get_id()}_api_key" value="<%= starify(curTorrentProvider.api_key) %>" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'digest'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_digest">
+<label for="${curTorrentProvider.get_id()}_digest">
 <span class="component-title">Digest:</span>
 <span class="component-desc">
-<input type="text" name="${curTorrentProvider.getID()}_digest" id="${curTorrentProvider.getID()}_digest" value="$curTorrentProvider.digest" class="form-control input-sm input350" />
+<input type="text" name="${curTorrentProvider.get_id()}_digest" id="${curTorrentProvider.get_id()}_digest" value="$curTorrentProvider.digest" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'hash'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_hash">
+<label for="${curTorrentProvider.get_id()}_hash">
 <span class="component-title">Hash:</span>
 <span class="component-desc">
-<input type="text" name="${curTorrentProvider.getID()}_hash" id="${curTorrentProvider.getID()}_hash" value="$curTorrentProvider.hash" class="form-control input-sm input350" />
+<input type="text" name="${curTorrentProvider.get_id()}_hash" id="${curTorrentProvider.get_id()}_hash" value="$curTorrentProvider.hash" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'username'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_username">
+<label for="${curTorrentProvider.get_id()}_username">
 <span class="component-title">Username:</span>
 <span class="component-desc">
-<input type="text" name="${curTorrentProvider.getID()}_username" id="${curTorrentProvider.getID()}_username" value="$curTorrentProvider.username" class="form-control input-sm input350" />
+<input type="text" name="${curTorrentProvider.get_id()}_username" id="${curTorrentProvider.get_id()}_username" value="$curTorrentProvider.username" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'password'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_password">
+<label for="${curTorrentProvider.get_id()}_password">
 <span class="component-title">Password:</span>
 <span class="component-desc">
-<input type="password" name="${curTorrentProvider.getID()}_password" id="${curTorrentProvider.getID()}_password" value="#echo '*' * len($curTorrentProvider.password)#" class="form-control input-sm input350" />
+<input type="password" name="${curTorrentProvider.get_id()}_password" id="${curTorrentProvider.get_id()}_password" value="#echo '*' * len($curTorrentProvider.password)#" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'passkey'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_passkey">
+<label for="${curTorrentProvider.get_id()}_passkey">
 <span class="component-title">Passkey:</span>
 <span class="component-desc">
-<input type="text" name="${curTorrentProvider.getID()}_passkey" id="${curTorrentProvider.getID()}_passkey" value="<%= starify(curTorrentProvider.passkey) %>" class="form-control input-sm input350" />
+<input type="text" name="${curTorrentProvider.get_id()}_passkey" id="${curTorrentProvider.get_id()}_passkey" value="<%= starify(curTorrentProvider.passkey) %>" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curTorrentProvider, 'ratio'):
+#if $hasattr($curTorrentProvider, '_seed_ratio') and 'blackhole' != $sickbeard.TORRENT_METHOD:
+#set $torrent_method_text = {'blackhole': 'Black hole', 'utorrent': 'uTorrent', 'transmission': 'Transmission', 'deluge': 'Deluge', 'download_station': 'Synology DS', 'rtorrent': 'rTorrent'}
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_ratio">
-<span class="component-title" id="${curTorrentProvider.getID()}_ratio_desc">Seed ratio:</span>
+<label for="${curTorrentProvider.get_id()}_ratio">
+<span class="component-title" id="${curTorrentProvider.get_id()}_ratio_desc">Seed until ratio (the goal)</span>
 <span class="component-desc">
-<input type="number" step="0.1" name="${curTorrentProvider.getID()}_ratio" id="${curTorrentProvider.getID()}_ratio" value="$curTorrentProvider.ratio" class="form-control input-sm input75" />
-</span>
-</label>
-<label>
-<span class="component-title">&nbsp;</span>
-<span class="component-desc">
-<p>stop transfer when ratio is reached<br>(-1 SickGear default to seed forever, or leave blank for downloader default)</p>
+<input type="number" step="0.1" name="${curTorrentProvider.get_id()}_ratio" id="${curTorrentProvider.get_id()}_ratio" value="$curTorrentProvider._seed_ratio" class="form-control input-sm input75" />
+<p>this ratio is requested of each download sent to $torrent_method_text[$sickbeard.TORRENT_METHOD]</p>
+<div class="clear-left"><p>(set -1 to seed forever, or leave blank for the $torrent_method_text[$sickbeard.TORRENT_METHOD] default)</p></div>
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'minseed'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_minseed">
-<span class="component-title" id="${curTorrentProvider.getID()}_minseed_desc">Minimum seeders:</span>
+<label for="${curTorrentProvider.get_id()}_minseed">
+<span class="component-title" id="${curTorrentProvider.get_id()}_minseed_desc">Minimum seeders</span>
 <span class="component-desc">
-<input type="number" name="${curTorrentProvider.getID()}_minseed" id="${curTorrentProvider.getID()}_minseed" value="$curTorrentProvider.minseed" class="form-control input-sm input75" />
+<input type="number" name="${curTorrentProvider.get_id()}_minseed" id="${curTorrentProvider.get_id()}_minseed" value="$curTorrentProvider.minseed" class="form-control input-sm input75" />
+<p>a release must have to be snatch worthy</p>
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'minleech'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_minleech">
-<span class="component-title" id="${curTorrentProvider.getID()}_minleech_desc">Minimum leechers:</span>
+<label for="${curTorrentProvider.get_id()}_minleech">
+<span class="component-title" id="${curTorrentProvider.get_id()}_minleech_desc">Minimum leechers</span>
 <span class="component-desc">
-<input type="number" name="${curTorrentProvider.getID()}_minleech" id="${curTorrentProvider.getID()}_minleech" value="$curTorrentProvider.minleech" class="form-control input-sm input75" />
+<input type="number" name="${curTorrentProvider.get_id()}_minleech" id="${curTorrentProvider.get_id()}_minleech" value="$curTorrentProvider.minleech" class="form-control input-sm input75" />
+<p>a release must have to be snatch worthy</p>
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'proxy'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_proxy">
+<label for="${curTorrentProvider.get_id()}_proxy">
 <span class="component-title">Access provider via proxy</span>
 <span class="component-desc">
-<input type="checkbox" class="enabler" name="${curTorrentProvider.getID()}_proxy" id="${curTorrentProvider.getID()}_proxy" <%= html_checked if curTorrentProvider.proxy.enabled else '' %>/>
+<input type="checkbox" class="enabler" name="${curTorrentProvider.get_id()}_proxy" id="${curTorrentProvider.get_id()}_proxy" <%= html_checked if curTorrentProvider.proxy.enabled else '' %>/>
 <p>to bypass country blocking mechanisms</p>
 </span>
 </label>
 </div>
 #if $hasattr($curTorrentProvider.proxy, 'url'):
-<div class="field-pair content_${curTorrentProvider.getID()}_proxy" id="content_${curTorrentProvider.getID()}_proxy">
-<label for="${curTorrentProvider.getID()}_proxy_url">
+<div class="field-pair content_${curTorrentProvider.get_id()}_proxy" id="content_${curTorrentProvider.get_id()}_proxy">
+<label for="${curTorrentProvider.get_id()}_proxy_url">
 <span class="component-title">Proxy URL:</span>
 <span class="component-desc">
-<select name="${curTorrentProvider.getID()}_proxy_url" id="${curTorrentProvider.getID()}_proxy_url" class="form-control input-sm">
+<select name="${curTorrentProvider.get_id()}_proxy_url" id="${curTorrentProvider.get_id()}_proxy_url" class="form-control input-sm">
 #for $i in $curTorrentProvider.proxy.urls.keys():
 <option value="$curTorrentProvider.proxy.urls[$i]" <%= html_selected if curTorrentProvider.proxy.url == curTorrentProvider.proxy.urls[i] else '' %>>$i</option>
 #end for
@@ -485,85 +467,71 @@
 </div>
 #end if
 #end if
 #if $hasattr($curTorrentProvider, 'confirmed'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_confirmed">
+<label for="${curTorrentProvider.get_id()}_confirmed">
 <span class="component-title">Confirmed download</span>
 <span class="component-desc">
-<input type="checkbox" name="${curTorrentProvider.getID()}_confirmed" id="${curTorrentProvider.getID()}_confirmed" <%= html_checked if curTorrentProvider.confirmed else '' %>/>
+<input type="checkbox" name="${curTorrentProvider.get_id()}_confirmed" id="${curTorrentProvider.get_id()}_confirmed" <%= html_checked if curTorrentProvider.confirmed else '' %>/>
 <p>only download torrents from trusted or verified uploaders ?</p>
 </span>
 </label>
 </div>
 #end if
 #if $hasattr($curTorrentProvider, 'freeleech'):
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_freeleech">
+<label for="${curTorrentProvider.get_id()}_freeleech">
 <span class="component-title">Freeleech</span>
 <span class="component-desc">
-<input type="checkbox" name="${curTorrentProvider.getID()}_freeleech" id="${curTorrentProvider.getID()}_freeleech" <%= html_checked if curTorrentProvider.freeleech else '' %>/>
+<input type="checkbox" name="${curTorrentProvider.get_id()}_freeleech" id="${curTorrentProvider.get_id()}_freeleech" <%= html_checked if curTorrentProvider.freeleech else '' %>/>
 <p>only download <b>[FreeLeech]</b> torrents.</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curTorrentProvider, 'enable_recentsearch'):
+#if $hasattr($curTorrentProvider, 'enable_recentsearch') and $curTorrentProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_enable_recentsearch">
+<label for="${curTorrentProvider.get_id()}_enable_recentsearch">
 <span class="component-title">Enable recent searches</span>
 <span class="component-desc">
-<input type="checkbox" name="${curTorrentProvider.getID()}_enable_recentsearch" id="${curTorrentProvider.getID()}_enable_recentsearch" <%= html_checked if curTorrentProvider.enable_recentsearch else '' %>/>
+<input type="checkbox" name="${curTorrentProvider.get_id()}_enable_recentsearch" id="${curTorrentProvider.get_id()}_enable_recentsearch" <%= html_checked if curTorrentProvider.enable_recentsearch else '' %>/>
 <p>enable provider to perform recent searches.</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curTorrentProvider, 'enable_backlog'):
+#if $hasattr($curTorrentProvider, 'enable_backlog') and $curTorrentProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_enable_backlog">
+<label for="${curTorrentProvider.get_id()}_enable_backlog">
 <span class="component-title">Enable backlog searches</span>
 <span class="component-desc">
-<input type="checkbox" name="${curTorrentProvider.getID()}_enable_backlog" id="${curTorrentProvider.getID()}_enable_backlog" <%= html_checked if curTorrentProvider.enable_backlog else '' %>/>
+<input type="checkbox" name="${curTorrentProvider.get_id()}_enable_backlog" id="${curTorrentProvider.get_id()}_enable_backlog" <%= html_checked if curTorrentProvider.enable_backlog else '' %>/>
 <p>enable provider to perform backlog searches.</p>
 </span>
 </label>
 </div>
 #end if
-#if $hasattr($curTorrentProvider, 'search_fallback'):
+#if $hasattr($curTorrentProvider, 'search_mode') and $curTorrentProvider.supportsBacklog:
 <div class="field-pair">
-<label for="${curTorrentProvider.getID()}_search_fallback">
-<span class="component-title">Season search fallback</span>
-<span class="component-desc">
-<input type="checkbox" name="${curTorrentProvider.getID()}_search_fallback" id="${curTorrentProvider.getID()}_search_fallback" <%= html_checked if curTorrentProvider.search_fallback else '' %>/>
-<p>when searching for a complete season depending on search mode you may return no results, this helps by restarting the search using the opposite search mode.</p>
-</span>
-</label>
+<span class="component-title">Season search mode</span>
+<span class="component-desc">
+<label class="space-right">
+<input type="radio" name="${curTorrentProvider.get_id()}_search_mode" id="${curTorrentProvider.get_id()}_search_mode_sponly" value="sponly" <%= html_checked if 'sponly' == curTorrentProvider.search_mode else '' %>/>season packs only
+</label>
+<label>
+<input type="radio" name="${curTorrentProvider.get_id()}_search_mode" id="${curTorrentProvider.get_id()}_search_mode_eponly" value="eponly" <%= html_checked if 'eponly' == curTorrentProvider.search_mode else '' %>/>episodes only
+</label>
+<p>when searching for complete seasons, search for packs or collect single episodes</p>
+</span>
 </div>
 #end if
-#if $hasattr($curTorrentProvider, 'search_mode'):
+#if $hasattr($curTorrentProvider, 'search_fallback') and $curTorrentProvider.supportsBacklog:
 <div class="field-pair">
-<label>
-<span class="component-title">Season search mode</span>
+<label for="${curTorrentProvider.get_id()}_search_fallback">
+<span class="component-title">Season search fallback</span>
 <span class="component-desc">
-<p>when searching for complete seasons you can choose to have it look for season packs only, or choose to have it build a complete season from just single episodes.</p>
-</span>
-</label>
-<label>
-<span class="component-title"></span>
-<span class="component-desc">
-<input type="radio" name="${curTorrentProvider.getID()}_search_mode" id="${curTorrentProvider.getID()}_search_mode_sponly" value="sponly" <%= html_checked if 'sponly' == curTorrentProvider.search_mode else '' %>/>season packs only.
-</span>
-</label>
-<label>
-<span class="component-title"></span>
-<span class="component-desc">
-<input type="radio" name="${curTorrentProvider.getID()}_search_mode" id="${curTorrentProvider.getID()}_search_mode_eponly" value="eponly" <%= html_checked if 'eponly' == curTorrentProvider.search_mode else '' %>/>episodes only.
+<input type="checkbox" name="${curTorrentProvider.get_id()}_search_fallback" id="${curTorrentProvider.get_id()}_search_fallback" <%= html_checked if curTorrentProvider.search_fallback else '' %>/>
+<p>run the alternate season search mode when a complete season is not found</p>
 </span>
 </label>
 </div>
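One functional addition in this template is the capability probe "#if callable(getattr(curNzbProvider, 'ui_string'))", which lets a provider supply its own helper text for the API key field. A provider-side hook could look something like this (a hypothetical sketch only; the name ui_string and its single-argument call are taken from the template above, everything else is an assumption):

    # hypothetical provider-side hook matching the template's ui_string($field_name) call
    class ExampleNzbProvider(object):
        name = 'Example'

        def get_id(self):
            return 'example'

        def ui_string(self, key=None):
            # return field-specific helper text; an empty string hides the hint
            if key == self.get_id() + '_api_key':
                return 'get the API key from the Example account page'
            return ''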


@@ -139,9 +139,9 @@
 #else
 #if 0 < $hItem['provider']
 #if $curStatus in [SNATCHED, FAILED]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.makeID($hItem['provider']))
+#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
 #if None is not $provider
-<img src="$sbRoot/images/providers/<%= provider.imageName() %>" width="16" height="16" /><span>$provider.name</span>
+<img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16" /><span>$provider.name</span>
 #else
 <img src="$sbRoot/images/providers/missing.png" width="16" height="16" title="missing provider" /><span>Missing Provider</span>
 #end if
@@ -186,10 +186,10 @@
 #set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
 #set $basename = $os.path.basename($action['resource'])
 #if $curStatus in [SNATCHED, FAILED]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.makeID($action['provider']))
+#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($action['provider']))
 #if None is not $provider
 #set $prov_list += ['<span%s><img class="help" src="%s/images/providers/%s" width="16" height="16" alt="%s" title="%s.. %s: %s" /></span>'\
-% (('', ' class="fail"')[FAILED == $curStatus], $sbRoot, $provider.imageName(), $provider.name,
+% (('', ' class="fail"')[FAILED == $curStatus], $sbRoot, $provider.image_name(), $provider.name,
 ('%s%s' % ($order, 'th' if $order in [11, 12, 13] or str($order)[-1] not in $ordinal_indicators else $ordinal_indicators[str($order)[-1]]), 'Snatch failed')[FAILED == $curStatus],
 $provider.name, $basename)]
 #set $order += (0, 1)[SNATCHED == $curStatus]


@@ -72,9 +72,9 @@
 <tr>
 <td class="text-nowrap text-left">#echo re.sub('"', '', $hItem['release'])#</td>
 <td>#echo ($hItem['size'], '?')[-1 == $hItem['size']]#</td>
-#set $provider = $providers.getProviderClass($generic.GenericProvider.makeID($hItem['provider']))
+#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
 #if None is not $provider:
-<td><img src="$sbRoot/images/providers/<%= provider.imageName() %>" width="16" height="16" alt="$provider.name" title="$provider.name" /></td>
+<td><img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16" alt="$provider.name" title="$provider.name" /></td>
 #else
 <td><img src="$sbRoot/images/providers/missing.png" width="16" height="16" alt="missing provider" title="missing provider" /></td>
 #end if


@@ -31,7 +31,6 @@ import sys
 import os.path
 import uuid
 import base64
-import sickbeard
 sys.path.insert(1, os.path.abspath('../lib'))
 from sickbeard import providers, metadata, config, webserveInit
 from sickbeard.providers.generic import GenericProvider
@@ -255,10 +254,6 @@ OMGWTFNZBS = False
 OMGWTFNZBS_USERNAME = None
 OMGWTFNZBS_APIKEY = None
-NEWZBIN = False
-NEWZBIN_USERNAME = None
-NEWZBIN_PASSWORD = None
 SAB_USERNAME = None
 SAB_PASSWORD = None
 SAB_APIKEY = None
@@ -485,10 +480,12 @@ COOKIE_SECRET = base64.b64encode(uuid.uuid4().bytes + uuid.uuid4().bytes)
 __INITIALIZED__ = False
 
 def get_backlog_cycle_time():
     cycletime = RECENTSEARCH_FREQUENCY * 2 + 7
     return max([cycletime, 720])
 
 def initialize(consoleLogging=True):
     with INIT_LOCK:
@@ -525,7 +522,7 @@ def initialize(consoleLogging=True):
         USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \
         USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
         USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, METADATA_KODI, metadata_provider_dict, \
-        NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, POSTPONE_IF_SYNC_FILES, recentSearchScheduler, NFO_RENAME, \
+        GIT_PATH, MOVE_ASSOCIATED_FILES, POSTPONE_IF_SYNC_FILES, recentSearchScheduler, NFO_RENAME, \
         GUI_NAME, DEFAULT_HOME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, EPISODE_VIEW_LAYOUT, EPISODE_VIEW_SORT, EPISODE_VIEW_DISPLAY_PAUSED, EPISODE_VIEW_MISSED_RANGE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, THEME_NAME, \
         POSTER_SORTBY, POSTER_SORTDIR, \
         METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, REQUIRE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
@@ -542,7 +539,6 @@ def initialize(consoleLogging=True):
         CheckSection(CFG, 'General')
         CheckSection(CFG, 'Blackhole')
-        CheckSection(CFG, 'Newzbin')
         CheckSection(CFG, 'SABnzbd')
         CheckSection(CFG, 'NZBget')
         CheckSection(CFG, 'XBMC')
@@ -587,7 +583,7 @@ def initialize(consoleLogging=True):
             CACHE_DIR = ACTUAL_CACHE_DIR
         if not helpers.makeDir(CACHE_DIR):
-            logger.log(u"!!! Creating local cache dir failed, using system default", logger.ERROR)
+            logger.log(u'!!! Creating local cache dir failed, using system default', logger.ERROR)
             CACHE_DIR = None
 
         # clean cache folders
@@ -604,7 +600,7 @@ def initialize(consoleLogging=True):
         TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0))
         DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
         TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
-        TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")
+        TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u':%S', u'')
         TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'network')
         DISPLAY_BACKGROUND = bool(check_setting_int(CFG, 'General', 'display_background', 0))
         DISPLAY_BACKGROUND_TRANSPARENT = check_setting_str(CFG, 'General', 'display_background_transparent', 'transparent')
@@ -618,7 +614,7 @@ def initialize(consoleLogging=True):
         LOG_DIR = os.path.normpath(os.path.join(DATA_DIR, ACTUAL_LOG_DIR))
         if not helpers.makeDir(LOG_DIR):
-            logger.log(u"!!! No log folder, logging to screen only!", logger.ERROR)
+            logger.log(u'!!! No log folder, logging to screen only!', logger.ERROR)
 
         FILE_LOGGING_PRESET = check_setting_str(CFG, 'General', 'file_logging_preset', 'DB')
@@ -635,7 +631,7 @@ def initialize(consoleLogging=True):
         WEB_HOST = check_setting_str(CFG, 'General', 'web_host', '0.0.0.0')
         WEB_IPV6 = bool(check_setting_int(CFG, 'General', 'web_ipv6', 0))
-        WEB_ROOT = check_setting_str(CFG, 'General', 'web_root', '').rstrip("/")
+        WEB_ROOT = check_setting_str(CFG, 'General', 'web_root', '').rstrip('/')
         WEB_LOG = bool(check_setting_int(CFG, 'General', 'web_log', 0))
         ENCRYPTION_VERSION = check_setting_int(CFG, 'General', 'encryption_version', 0)
         WEB_USERNAME = check_setting_str(CFG, 'General', 'web_username', '')
@@ -767,10 +763,6 @@ def initialize(consoleLogging=True):
         NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '')
         NZBS_HASH = check_setting_str(CFG, 'NZBs', 'nzbs_hash', '')
-        NEWZBIN = bool(check_setting_int(CFG, 'Newzbin', 'newzbin', 0))
-        NEWZBIN_USERNAME = check_setting_str(CFG, 'Newzbin', 'newzbin_username', '')
-        NEWZBIN_PASSWORD = check_setting_str(CFG, 'Newzbin', 'newzbin_password', '')
         SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '')
         SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '')
         SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '')
@@ -840,7 +832,7 @@ def initialize(consoleLogging=True):
         PROWL_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Prowl', 'prowl_notify_ondownload', 0))
         PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Prowl', 'prowl_notify_onsubtitledownload', 0))
         PROWL_API = check_setting_str(CFG, 'Prowl', 'prowl_api', '')
-        PROWL_PRIORITY = check_setting_str(CFG, 'Prowl', 'prowl_priority', "0")
+        PROWL_PRIORITY = check_setting_str(CFG, 'Prowl', 'prowl_priority', '0')
 
         USE_TWITTER = bool(check_setting_int(CFG, 'Twitter', 'use_twitter', 0))
         TWITTER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsnatch', 0))
@@ -924,7 +916,7 @@ def initialize(consoleLogging=True):
         NMA_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'NMA', 'nma_notify_ondownload', 0))
         NMA_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'NMA', 'nma_notify_onsubtitledownload', 0))
         NMA_API = check_setting_str(CFG, 'NMA', 'nma_api', '')
-        NMA_PRIORITY = check_setting_str(CFG, 'NMA', 'nma_priority', "0")
+        NMA_PRIORITY = check_setting_str(CFG, 'NMA', 'nma_priority', '0')
 
         USE_PUSHALOT = bool(check_setting_int(CFG, 'Pushalot', 'use_pushalot', 0))
         PUSHALOT_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsnatch', 0))
@@ -1019,119 +1011,91 @@ def initialize(consoleLogging=True):
         torrentRssProviderList = providers.getTorrentRssProviderList(TORRENTRSS_DATA)
 
         # dynamically load provider settings
-        for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
-                                   curProvider.providerType == GenericProvider.TORRENT]:
-            curTorrentProvider.enabled = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                                curTorrentProvider.getID(), 0))
-            if hasattr(curTorrentProvider, 'api_key'):
-                curTorrentProvider.api_key = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                               curTorrentProvider.getID() + '_api_key', '')
-            if hasattr(curTorrentProvider, 'hash'):
-                curTorrentProvider.hash = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                            curTorrentProvider.getID() + '_hash', '')
-            if hasattr(curTorrentProvider, 'digest'):
-                curTorrentProvider.digest = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                              curTorrentProvider.getID() + '_digest', '')
-            if hasattr(curTorrentProvider, 'username'):
-                curTorrentProvider.username = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                                curTorrentProvider.getID() + '_username', '')
-            if hasattr(curTorrentProvider, 'password'):
-                curTorrentProvider.password = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                                curTorrentProvider.getID() + '_password', '')
-            if hasattr(curTorrentProvider, 'passkey'):
-                curTorrentProvider.passkey = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                               curTorrentProvider.getID() + '_passkey', '')
-            if hasattr(curTorrentProvider, 'proxy'):
-                curTorrentProvider.proxy.enabled = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                                          curTorrentProvider.getID() + '_proxy', 0))
-                if hasattr(curTorrentProvider.proxy, 'url'):
-                    curTorrentProvider.proxy.url = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                                     curTorrentProvider.getID() + '_proxy_url', '')
-            if hasattr(curTorrentProvider, 'confirmed'):
-                curTorrentProvider.confirmed = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                                      curTorrentProvider.getID() + '_confirmed', 0))
-            if hasattr(curTorrentProvider, 'options'):
-                curTorrentProvider.options = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                               curTorrentProvider.getID() + '_options', '')
-            if hasattr(curTorrentProvider, 'ratio'):
-                curTorrentProvider.ratio = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                             curTorrentProvider.getID() + '_ratio', '')
-            if hasattr(curTorrentProvider, 'minseed'):
-                curTorrentProvider.minseed = check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                               curTorrentProvider.getID() + '_minseed', 0)
-            if hasattr(curTorrentProvider, 'minleech'):
-                curTorrentProvider.minleech = check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                                curTorrentProvider.getID() + '_minleech', 0)
-            if hasattr(curTorrentProvider, 'freeleech'):
-                curTorrentProvider.freeleech = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
-                                                                      curTorrentProvider.getID() + '_freeleech', 0))
-            if hasattr(curTorrentProvider, 'search_mode'):
-                curTorrentProvider.search_mode = check_setting_str(CFG, curTorrentProvider.getID().upper(),
-                                                                   curTorrentProvider.getID() + '_search_mode',
-                                                                   'eponly')
-            if hasattr(curTorrentProvider, 'search_fallback'):
+        for torrent_prov in [curProvider for curProvider in providers.sortedProviderList()
+                             if GenericProvider.TORRENT == curProvider.providerType]:
+            prov_id = torrent_prov.get_id()
+            prov_id_uc = torrent_prov.get_id().upper()
+            torrent_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, 0))
+            if hasattr(torrent_prov, 'api_key'):
+                torrent_prov.api_key = check_setting_str(CFG, prov_id_uc, prov_id + '_api_key', '')
+            if hasattr(torrent_prov, 'hash'):
+                torrent_prov.hash = check_setting_str(CFG, prov_id_uc, prov_id + '_hash', '')
+            if hasattr(torrent_prov, 'digest'):
+                torrent_prov.digest = check_setting_str(CFG, prov_id_uc, prov_id + '_digest', '')
+            if hasattr(torrent_prov, 'username'):
+                torrent_prov.username = check_setting_str(CFG, prov_id_uc, prov_id + '_username', '')
+            if hasattr(torrent_prov, 'password'):
+                torrent_prov.password = check_setting_str(CFG, prov_id_uc, prov_id + '_password', '')
+            if hasattr(torrent_prov, 'passkey'):
+                torrent_prov.passkey = check_setting_str(CFG, prov_id_uc, prov_id + '_passkey', '')
+            if hasattr(torrent_prov, 'proxy'):
+                torrent_prov.proxy.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_proxy', 0))
+                if hasattr(torrent_prov.proxy, 'url'):
+                    torrent_prov.proxy.url = check_setting_str(CFG, prov_id_uc, prov_id + '_proxy_url', '')
+            if hasattr(torrent_prov, 'confirmed'):
+                torrent_prov.confirmed = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_confirmed', 0))
+            if hasattr(torrent_prov, 'options'):
+                torrent_prov.options = check_setting_str(CFG, prov_id_uc, prov_id + '_options', '')
+            if hasattr(torrent_prov, 'ratio'):
+                torrent_prov.ratio = check_setting_str(CFG, prov_id_uc, prov_id + '_ratio', '')
+            if hasattr(torrent_prov, 'minseed'):
+                torrent_prov.minseed = check_setting_int(CFG, prov_id_uc, prov_id + '_minseed', 0)
+            if hasattr(torrent_prov, 'minleech'):
+                torrent_prov.minleech = check_setting_int(CFG, prov_id_uc, prov_id + '_minleech', 0)
+            if hasattr(torrent_prov, 'freeleech'):
+                torrent_prov.freeleech = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_freeleech', 0))
+            if hasattr(torrent_prov, 'search_mode'):
+                torrent_prov.search_mode = check_setting_str(CFG, prov_id_uc, prov_id + '_search_mode', 'eponly')
+            if hasattr(torrent_prov, 'search_fallback'):
+                torrent_prov.search_fallback = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_search_fallback', 0))
+            if hasattr(torrent_prov, 'enable_recentsearch'):
+                torrent_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
+                                                                          prov_id + '_enable_recentsearch', 1))
+            if hasattr(torrent_prov, 'enable_backlog'):
+                torrent_prov.enable_backlog = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_enable_backlog', 1))
curTorrentProvider.search_fallback = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_search_fallback',
0))
if hasattr(curTorrentProvider, 'enable_recentsearch'): for nzb_prov in [curProvider for curProvider in providers.sortedProviderList()
curTorrentProvider.enable_recentsearch = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), if GenericProvider.NZB == curProvider.providerType]:
curTorrentProvider.getID() + prov_id = nzb_prov.get_id()
'_enable_recentsearch', 1)) prov_id_uc = nzb_prov.get_id().upper()
if hasattr(curTorrentProvider, 'enable_backlog'): nzb_prov.enabled = bool(
curTorrentProvider.enable_backlog = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), check_setting_int(CFG, prov_id_uc, prov_id, 0))
curTorrentProvider.getID() + '_enable_backlog', if hasattr(nzb_prov, 'api_key'):
1)) nzb_prov.api_key = check_setting_str(CFG, prov_id_uc, prov_id + '_api_key', '')
if hasattr(nzb_prov, 'username'):
for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if nzb_prov.username = check_setting_str(CFG, prov_id_uc, prov_id + '_username', '')
curProvider.providerType == GenericProvider.NZB]: if hasattr(nzb_prov, 'search_mode'):
curNzbProvider.enabled = bool( nzb_prov.search_mode = check_setting_str(CFG, prov_id_uc, prov_id + '_search_mode', 'eponly')
check_setting_int(CFG, curNzbProvider.getID().upper(), curNzbProvider.getID(), 0)) if hasattr(nzb_prov, 'search_fallback'):
if hasattr(curNzbProvider, 'api_key'): nzb_prov.search_fallback = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_search_fallback', 0))
curNzbProvider.api_key = check_setting_str(CFG, curNzbProvider.getID().upper(), if hasattr(nzb_prov, 'enable_recentsearch'):
curNzbProvider.getID() + '_api_key', '') nzb_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
if hasattr(curNzbProvider, 'username'): prov_id + '_enable_recentsearch', 1))
curNzbProvider.username = check_setting_str(CFG, curNzbProvider.getID().upper(), if hasattr(nzb_prov, 'enable_backlog'):
curNzbProvider.getID() + '_username', '') nzb_prov.enable_backlog = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_enable_backlog', 1))
if hasattr(curNzbProvider, 'search_mode'):
curNzbProvider.search_mode = check_setting_str(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_search_mode',
'eponly')
if hasattr(curNzbProvider, 'search_fallback'):
curNzbProvider.search_fallback = bool(check_setting_int(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_search_fallback',
0))
if hasattr(curNzbProvider, 'enable_recentsearch'):
curNzbProvider.enable_recentsearch = bool(check_setting_int(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_enable_recentsearch',
1))
if hasattr(curNzbProvider, 'enable_backlog'):
curNzbProvider.enable_backlog = bool(check_setting_int(CFG, curNzbProvider.getID().upper(),
curNzbProvider.getID() + '_enable_backlog',
1))
if not os.path.isfile(CONFIG_FILE): if not os.path.isfile(CONFIG_FILE):
logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG) logger.log(u'Unable to find \'' + CONFIG_FILE + '\', all settings will be default!', logger.DEBUG)
save_config() save_config()
# start up all the threads # start up all the threads
logger.sb_log_instance.initLogging(consoleLogging=consoleLogging) logger.sb_log_instance.initLogging(consoleLogging=consoleLogging)
# initialize the main SB database # initialize the main SB database
myDB = db.DBConnection() my_db = db.DBConnection()
db.MigrationCode(myDB) db.MigrationCode(my_db)
# initialize the cache database # initialize the cache database
myDB = db.DBConnection('cache.db') my_db = db.DBConnection('cache.db')
db.upgradeDatabase(myDB, cache_db.InitialSchema) db.upgradeDatabase(my_db, cache_db.InitialSchema)
# initialize the failed downloads database # initialize the failed downloads database
myDB = db.DBConnection('failed.db') my_db = db.DBConnection('failed.db')
db.upgradeDatabase(myDB, failed_db.InitialSchema) db.upgradeDatabase(my_db, failed_db.InitialSchema)
# fix up any db problems # fix up any db problems
myDB = db.DBConnection() my_db = db.DBConnection()
db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck) db.sanityCheckDatabase(my_db, mainDB.MainSanityCheck)
# migrate the config if it needs it # migrate the config if it needs it
migrator = ConfigMigrator(CFG) migrator = ConfigMigrator(CFG)
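Note: the rewritten settings loops earlier in this hunk cache prov_id / prov_id_uc once per provider and keep a hasattr guard around every optional attribute. A minimal, self-contained sketch of that load pattern follows; the check_setting_* helpers, the dummy provider and its config section are simplified stand-ins for the real ConfigObj-backed code, and the small attribute table is only shorthand for the explicit if-blocks above.

def check_setting_str(cfg, section, key, default):
    # stand-in: the real helper reads from (and writes defaults back to) the ConfigObj instance CFG
    return cfg.get(section, {}).get(key, default)

def check_setting_int(cfg, section, key, default):
    try:
        return int(check_setting_str(cfg, section, key, default))
    except (TypeError, ValueError):
        return default

class DummyTorrentProvider(object):
    # stand-in provider exposing only the attributes this sketch loads
    def __init__(self, name):
        self.name, self.enabled, self.username, self.minseed = name, False, None, 0

    def get_id(self):
        return self.name.strip().lower()

cfg = {'DUMMY': {'dummy': '1', 'dummy_username': 'someuser', 'dummy_minseed': '5'}}

prov = DummyTorrentProvider('Dummy')
prov_id = prov.get_id()
prov_id_uc = prov_id.upper()

prov.enabled = bool(check_setting_int(cfg, prov_id_uc, prov_id, 0))
for attr, default, caster in (('username', '', str), ('minseed', 0, int)):
    if hasattr(prov, attr):
        setattr(prov, attr, caster(check_setting_str(cfg, prov_id_uc, '%s_%s' % (prov_id, attr), default)))

assert prov.enabled and 'someuser' == prov.username and 5 == prov.minseed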
@ -1147,7 +1111,7 @@ def initialize(consoleLogging=True):
(METADATA_TIVO, metadata.tivo), (METADATA_TIVO, metadata.tivo),
(METADATA_MEDE8ER, metadata.mede8er), (METADATA_MEDE8ER, metadata.mede8er),
(METADATA_KODI, metadata.kodi), (METADATA_KODI, metadata.kodi),
]: ]:
(cur_metadata_config, cur_metadata_class) = cur_metadata_tuple (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
tmp_provider = cur_metadata_class.metadata_class() tmp_provider = cur_metadata_class.metadata_class()
tmp_provider.set_config(cur_metadata_config) tmp_provider.set_config(cur_metadata_config)
@ -1169,7 +1133,7 @@ def initialize(consoleLogging=True):
cycleTime=datetime.timedelta(hours=1), cycleTime=datetime.timedelta(hours=1),
threadName='SHOWUPDATER', threadName='SHOWUPDATER',
start_time=datetime.time(hour=SHOW_UPDATE_HOUR), start_time=datetime.time(hour=SHOW_UPDATE_HOUR),
prevent_cycle_run=sickbeard.showQueueScheduler.action.isShowUpdateRunning) # 3 AM prevent_cycle_run=showQueueScheduler.action.isShowUpdateRunning) # 3 AM
# searchers # searchers
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(), searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
@ -1178,18 +1142,18 @@ def initialize(consoleLogging=True):
update_interval = datetime.timedelta(minutes=RECENTSEARCH_FREQUENCY) update_interval = datetime.timedelta(minutes=RECENTSEARCH_FREQUENCY)
recentSearchScheduler = scheduler.Scheduler(searchRecent.RecentSearcher(), recentSearchScheduler = scheduler.Scheduler(searchRecent.RecentSearcher(),
cycleTime=update_interval, cycleTime=update_interval,
threadName='RECENTSEARCHER', threadName='RECENTSEARCHER',
run_delay=update_now if RECENTSEARCH_STARTUP run_delay=update_now if RECENTSEARCH_STARTUP
else datetime.timedelta(minutes=5), else datetime.timedelta(minutes=5),
prevent_cycle_run=sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress) prevent_cycle_run=searchQueueScheduler.action.is_recentsearch_in_progress)
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(), backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()), cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
threadName='BACKLOG', threadName='BACKLOG',
run_delay=update_now if BACKLOG_STARTUP run_delay=update_now if BACKLOG_STARTUP
else datetime.timedelta(minutes=10), else datetime.timedelta(minutes=10),
prevent_cycle_run=sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress) prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress)
search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60} search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60}
if CHECK_PROPERS_INTERVAL in search_intervals: if CHECK_PROPERS_INTERVAL in search_intervals:
@ -1204,7 +1168,7 @@ def initialize(consoleLogging=True):
threadName='FINDPROPERS', threadName='FINDPROPERS',
start_time=run_at, start_time=run_at,
run_delay=update_interval, run_delay=update_interval,
prevent_cycle_run=sickbeard.searchQueueScheduler.action.is_propersearch_in_progress) prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress)
# processors # processors
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(), autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
@ -1290,52 +1254,52 @@ def halt():
if __INITIALIZED__: if __INITIALIZED__:
logger.log(u"Aborting all threads") logger.log(u'Aborting all threads')
events.stop.set() events.stop.set()
logger.log(u"Waiting for the EVENTS thread to exit") logger.log(u'Waiting for the EVENTS thread to exit')
try: try:
events.join(10) events.join(10)
except: except:
pass pass
recentSearchScheduler.stop.set() recentSearchScheduler.stop.set()
logger.log(u"Waiting for the RECENTSEARCH thread to exit") logger.log(u'Waiting for the RECENTSEARCH thread to exit')
try: try:
recentSearchScheduler.join(10) recentSearchScheduler.join(10)
except: except:
pass pass
backlogSearchScheduler.stop.set() backlogSearchScheduler.stop.set()
logger.log(u"Waiting for the BACKLOG thread to exit") logger.log(u'Waiting for the BACKLOG thread to exit')
try: try:
backlogSearchScheduler.join(10) backlogSearchScheduler.join(10)
except: except:
pass pass
showUpdateScheduler.stop.set() showUpdateScheduler.stop.set()
logger.log(u"Waiting for the SHOWUPDATER thread to exit") logger.log(u'Waiting for the SHOWUPDATER thread to exit')
try: try:
showUpdateScheduler.join(10) showUpdateScheduler.join(10)
except: except:
pass pass
versionCheckScheduler.stop.set() versionCheckScheduler.stop.set()
logger.log(u"Waiting for the VERSIONCHECKER thread to exit") logger.log(u'Waiting for the VERSIONCHECKER thread to exit')
try: try:
versionCheckScheduler.join(10) versionCheckScheduler.join(10)
except: except:
pass pass
showQueueScheduler.stop.set() showQueueScheduler.stop.set()
logger.log(u"Waiting for the SHOWQUEUE thread to exit") logger.log(u'Waiting for the SHOWQUEUE thread to exit')
try: try:
showQueueScheduler.join(10) showQueueScheduler.join(10)
except: except:
pass pass
searchQueueScheduler.stop.set() searchQueueScheduler.stop.set()
logger.log(u"Waiting for the SEARCHQUEUE thread to exit") logger.log(u'Waiting for the SEARCHQUEUE thread to exit')
try: try:
searchQueueScheduler.join(10) searchQueueScheduler.join(10)
except: except:
@ -1343,7 +1307,7 @@ def halt():
if PROCESS_AUTOMATICALLY: if PROCESS_AUTOMATICALLY:
autoPostProcesserScheduler.stop.set() autoPostProcesserScheduler.stop.set()
logger.log(u"Waiting for the POSTPROCESSER thread to exit") logger.log(u'Waiting for the POSTPROCESSER thread to exit')
try: try:
autoPostProcesserScheduler.join(10) autoPostProcesserScheduler.join(10)
except: except:
@ -1351,7 +1315,7 @@ def halt():
if USE_TRAKT: if USE_TRAKT:
traktCheckerScheduler.stop.set() traktCheckerScheduler.stop.set()
logger.log(u"Waiting for the TRAKTCHECKER thread to exit") logger.log(u'Waiting for the TRAKTCHECKER thread to exit')
try: try:
traktCheckerScheduler.join(10) traktCheckerScheduler.join(10)
except: except:
@ -1359,7 +1323,7 @@ def halt():
if DOWNLOAD_PROPERS: if DOWNLOAD_PROPERS:
properFinderScheduler.stop.set() properFinderScheduler.stop.set()
logger.log(u"Waiting for the PROPERFINDER thread to exit") logger.log(u'Waiting for the PROPERFINDER thread to exit')
try: try:
properFinderScheduler.join(10) properFinderScheduler.join(10)
except: except:
@ -1367,7 +1331,7 @@ def halt():
if USE_SUBTITLES: if USE_SUBTITLES:
subtitlesFinderScheduler.stop.set() subtitlesFinderScheduler.stop.set()
logger.log(u"Waiting for the SUBTITLESFINDER thread to exit") logger.log(u'Waiting for the SUBTITLESFINDER thread to exit')
try: try:
subtitlesFinderScheduler.join(10) subtitlesFinderScheduler.join(10)
except: except:
@ -1375,7 +1339,7 @@ def halt():
if ADBA_CONNECTION: if ADBA_CONNECTION:
ADBA_CONNECTION.logout() ADBA_CONNECTION.logout()
logger.log(u"Waiting for the ANIDB CONNECTION thread to exit") logger.log(u'Waiting for the ANIDB CONNECTION thread to exit')
try: try:
ADBA_CONNECTION.join(10) ADBA_CONNECTION.join(10)
except: except:
@ -1387,7 +1351,7 @@ def halt():
def sig_handler(signum=None, frame=None): def sig_handler(signum=None, frame=None):
if type(signum) != type(None): if type(signum) != type(None):
logger.log(u"Signal %i caught, saving and exiting..." % int(signum)) logger.log(u'Signal %i caught, saving and exiting...' % int(signum))
events.put(events.SystemEvent.SHUTDOWN) events.put(events.SystemEvent.SHUTDOWN)
@ -1395,12 +1359,12 @@ def saveAll():
global showList global showList
# write all shows # write all shows
logger.log(u"Saving all shows to the database") logger.log(u'Saving all shows to the database')
for show in showList: for show in showList:
show.saveToDB() show.saveToDB()
# save config # save config
logger.log(u"Saving config file to disk") logger.log(u'Saving config file to disk')
save_config() save_config()
@ -1408,7 +1372,7 @@ def restart(soft=True):
if soft: if soft:
halt() halt()
saveAll() saveAll()
logger.log(u"Re-initializing all data") logger.log(u'Re-initializing all data')
initialize() initialize()
else: else:
events.put(events.SystemEvent.RESTART) events.put(events.SystemEvent.RESTART)
@ -1517,7 +1481,7 @@ def save_config():
new_config['General']['keep_processed_dir'] = int(KEEP_PROCESSED_DIR) new_config['General']['keep_processed_dir'] = int(KEEP_PROCESSED_DIR)
new_config['General']['process_method'] = PROCESS_METHOD new_config['General']['process_method'] = PROCESS_METHOD
new_config['General']['move_associated_files'] = int(MOVE_ASSOCIATED_FILES) new_config['General']['move_associated_files'] = int(MOVE_ASSOCIATED_FILES)
new_config['General']['postpone_if_sync_files'] = int (POSTPONE_IF_SYNC_FILES) new_config['General']['postpone_if_sync_files'] = int(POSTPONE_IF_SYNC_FILES)
new_config['General']['nfo_rename'] = int(NFO_RENAME) new_config['General']['nfo_rename'] = int(NFO_RENAME)
new_config['General']['process_automatically'] = int(PROCESS_AUTOMATICALLY) new_config['General']['process_automatically'] = int(PROCESS_AUTOMATICALLY)
new_config['General']['unpack'] = int(UNPACK) new_config['General']['unpack'] = int(UNPACK)
@ -1537,99 +1501,74 @@ def save_config():
new_config['Blackhole']['torrent_dir'] = TORRENT_DIR new_config['Blackhole']['torrent_dir'] = TORRENT_DIR
# dynamically save provider settings # dynamically save provider settings
for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if for torrent_prov in [curProvider for curProvider in providers.sortedProviderList()
curProvider.providerType == GenericProvider.TORRENT]: if GenericProvider.TORRENT == curProvider.providerType]:
new_config[curTorrentProvider.getID().upper()] = {} prov_id = torrent_prov.get_id()
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID()] = int(curTorrentProvider.enabled) prov_id_uc = torrent_prov.get_id().upper()
if hasattr(curTorrentProvider, 'digest'): new_config[prov_id_uc] = {}
new_config[curTorrentProvider.getID().upper()][ new_config[prov_id_uc][prov_id] = int(torrent_prov.enabled)
curTorrentProvider.getID() + '_digest'] = curTorrentProvider.digest if hasattr(torrent_prov, 'digest'):
if hasattr(curTorrentProvider, 'hash'): new_config[prov_id_uc][prov_id + '_digest'] = torrent_prov.digest
new_config[curTorrentProvider.getID().upper()][ if hasattr(torrent_prov, 'hash'):
curTorrentProvider.getID() + '_hash'] = curTorrentProvider.hash new_config[prov_id_uc][prov_id + '_hash'] = torrent_prov.hash
if hasattr(curTorrentProvider, 'api_key'): if hasattr(torrent_prov, 'api_key'):
new_config[curTorrentProvider.getID().upper()][ new_config[prov_id_uc][prov_id + '_api_key'] = torrent_prov.api_key
curTorrentProvider.getID() + '_api_key'] = curTorrentProvider.api_key if hasattr(torrent_prov, 'username'):
if hasattr(curTorrentProvider, 'username'): new_config[prov_id_uc][prov_id + '_username'] = torrent_prov.username
new_config[curTorrentProvider.getID().upper()][ if hasattr(torrent_prov, 'password'):
curTorrentProvider.getID() + '_username'] = curTorrentProvider.username new_config[prov_id_uc][prov_id + '_password'] = helpers.encrypt(torrent_prov.password, ENCRYPTION_VERSION)
if hasattr(curTorrentProvider, 'password'): if hasattr(torrent_prov, 'passkey'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_password'] = helpers.encrypt( new_config[prov_id_uc][prov_id + '_passkey'] = torrent_prov.passkey
curTorrentProvider.password, ENCRYPTION_VERSION) if hasattr(torrent_prov, 'confirmed'):
if hasattr(curTorrentProvider, 'passkey'): new_config[prov_id_uc][prov_id + '_confirmed'] = int(torrent_prov.confirmed)
new_config[curTorrentProvider.getID().upper()][ if hasattr(torrent_prov, 'ratio'):
curTorrentProvider.getID() + '_passkey'] = curTorrentProvider.passkey new_config[prov_id_uc][prov_id + '_ratio'] = torrent_prov.ratio
if hasattr(curTorrentProvider, 'confirmed'): if hasattr(torrent_prov, 'minseed'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_confirmed'] = int( new_config[prov_id_uc][prov_id + '_minseed'] = int(torrent_prov.minseed)
curTorrentProvider.confirmed) if hasattr(torrent_prov, 'minleech'):
if hasattr(curTorrentProvider, 'ratio'): new_config[prov_id_uc][prov_id + '_minleech'] = int(torrent_prov.minleech)
new_config[curTorrentProvider.getID().upper()][ if hasattr(torrent_prov, 'options'):
curTorrentProvider.getID() + '_ratio'] = curTorrentProvider.ratio new_config[prov_id_uc][prov_id + '_options'] = torrent_prov.options
if hasattr(curTorrentProvider, 'minseed'): if hasattr(torrent_prov, 'proxy'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_minseed'] = int( new_config[prov_id_uc][prov_id + '_proxy'] = int(torrent_prov.proxy.enabled)
curTorrentProvider.minseed) if hasattr(torrent_prov.proxy, 'url'):
if hasattr(curTorrentProvider, 'minleech'): new_config[prov_id_uc][prov_id + '_proxy_url'] = torrent_prov.proxy.url
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_minleech'] = int( if hasattr(torrent_prov, 'freeleech'):
curTorrentProvider.minleech) new_config[prov_id_uc][prov_id + '_freeleech'] = int(torrent_prov.freeleech)
if hasattr(curTorrentProvider, 'options'): if hasattr(torrent_prov, 'search_mode'):
new_config[curTorrentProvider.getID().upper()][ new_config[prov_id_uc][prov_id + '_search_mode'] = torrent_prov.search_mode
curTorrentProvider.getID() + '_options'] = curTorrentProvider.options if hasattr(torrent_prov, 'search_fallback'):
if hasattr(curTorrentProvider, 'proxy'): new_config[prov_id_uc][prov_id + '_search_fallback'] = int(torrent_prov.search_fallback)
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_proxy'] = int( if hasattr(torrent_prov, 'enable_recentsearch'):
curTorrentProvider.proxy.enabled) new_config[prov_id_uc][prov_id + '_enable_recentsearch'] = int(torrent_prov.enable_recentsearch)
if hasattr(curTorrentProvider.proxy, 'url'): if hasattr(torrent_prov, 'enable_backlog'):
new_config[curTorrentProvider.getID().upper()][ new_config[prov_id_uc][prov_id + '_enable_backlog'] = int(torrent_prov.enable_backlog)
curTorrentProvider.getID() + '_proxy_url'] = curTorrentProvider.proxy.url
if hasattr(curTorrentProvider, 'freeleech'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_freeleech'] = int(
curTorrentProvider.freeleech)
if hasattr(curTorrentProvider, 'search_mode'):
new_config[curTorrentProvider.getID().upper()][
curTorrentProvider.getID() + '_search_mode'] = curTorrentProvider.search_mode
if hasattr(curTorrentProvider, 'search_fallback'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_search_fallback'] = int(
curTorrentProvider.search_fallback)
if hasattr(curTorrentProvider, 'enable_recentsearch'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_recentsearch'] = int(
curTorrentProvider.enable_recentsearch)
if hasattr(curTorrentProvider, 'enable_backlog'):
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_backlog'] = int(
curTorrentProvider.enable_backlog)
for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if for nzb_prov in [curProvider for curProvider in providers.sortedProviderList()
curProvider.providerType == GenericProvider.NZB]: if GenericProvider.NZB == curProvider.providerType]:
new_config[curNzbProvider.getID().upper()] = {} prov_id = nzb_prov.get_id()
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID()] = int(curNzbProvider.enabled) prov_id_uc = nzb_prov.get_id().upper()
new_config[prov_id_uc] = {}
new_config[prov_id_uc][prov_id] = int(nzb_prov.enabled)
if hasattr(curNzbProvider, 'api_key'): if hasattr(nzb_prov, 'api_key'):
new_config[curNzbProvider.getID().upper()][ new_config[prov_id_uc][prov_id + '_api_key'] = nzb_prov.api_key
curNzbProvider.getID() + '_api_key'] = curNzbProvider.api_key if hasattr(nzb_prov, 'username'):
if hasattr(curNzbProvider, 'username'): new_config[prov_id_uc][prov_id + '_username'] = nzb_prov.username
new_config[curNzbProvider.getID().upper()][ if hasattr(nzb_prov, 'search_mode'):
curNzbProvider.getID() + '_username'] = curNzbProvider.username new_config[prov_id_uc][prov_id + '_search_mode'] = nzb_prov.search_mode
if hasattr(curNzbProvider, 'search_mode'): if hasattr(nzb_prov, 'search_fallback'):
new_config[curNzbProvider.getID().upper()][ new_config[prov_id_uc][prov_id + '_search_fallback'] = int(nzb_prov.search_fallback)
curNzbProvider.getID() + '_search_mode'] = curNzbProvider.search_mode if hasattr(nzb_prov, 'enable_recentsearch'):
if hasattr(curNzbProvider, 'search_fallback'): new_config[prov_id_uc][prov_id + '_enable_recentsearch'] = int(nzb_prov.enable_recentsearch)
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_search_fallback'] = int( if hasattr(nzb_prov, 'enable_backlog'):
curNzbProvider.search_fallback) new_config[prov_id_uc][prov_id + '_enable_backlog'] = int(nzb_prov.enable_backlog)
if hasattr(curNzbProvider, 'enable_recentsearch'):
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_recentsearch'] = int(
curNzbProvider.enable_recentsearch)
if hasattr(curNzbProvider, 'enable_backlog'):
new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_backlog'] = int(
curNzbProvider.enable_backlog)
new_config['NZBs'] = {} new_config['NZBs'] = {}
new_config['NZBs']['nzbs'] = int(NZBS) new_config['NZBs']['nzbs'] = int(NZBS)
new_config['NZBs']['nzbs_uid'] = NZBS_UID new_config['NZBs']['nzbs_uid'] = NZBS_UID
new_config['NZBs']['nzbs_hash'] = NZBS_HASH new_config['NZBs']['nzbs_hash'] = NZBS_HASH
new_config['Newzbin'] = {}
new_config['Newzbin']['newzbin'] = int(NEWZBIN)
new_config['Newzbin']['newzbin_username'] = NEWZBIN_USERNAME
new_config['Newzbin']['newzbin_password'] = helpers.encrypt(NEWZBIN_PASSWORD, ENCRYPTION_VERSION)
new_config['SABnzbd'] = {} new_config['SABnzbd'] = {}
new_config['SABnzbd']['sab_username'] = SAB_USERNAME new_config['SABnzbd']['sab_username'] = SAB_USERNAME
new_config['SABnzbd']['sab_password'] = helpers.encrypt(SAB_PASSWORD, ENCRYPTION_VERSION) new_config['SABnzbd']['sab_password'] = helpers.encrypt(SAB_PASSWORD, ENCRYPTION_VERSION)
@ -1881,41 +1820,41 @@ def save_config():
new_config.write() new_config.write()
def launchBrowser(startPort=None): def launchBrowser(start_port=None):
if not startPort: if not start_port:
startPort = WEB_PORT start_port = WEB_PORT
if ENABLE_HTTPS: if ENABLE_HTTPS:
browserURL = 'https://localhost:%d%s' % (startPort, WEB_ROOT) browser_url = 'https://localhost:%d%s' % (start_port, WEB_ROOT)
else: else:
browserURL = 'http://localhost:%d%s' % (startPort, WEB_ROOT) browser_url = 'http://localhost:%d%s' % (start_port, WEB_ROOT)
try: try:
webbrowser.open(browserURL, 2, 1) webbrowser.open(browser_url, 2, 1)
except: except:
try: try:
webbrowser.open(browserURL, 1, 1) webbrowser.open(browser_url, 1, 1)
except: except:
logger.log(u"Unable to launch a browser", logger.ERROR) logger.log(u'Unable to launch a browser', logger.ERROR)
def getEpList(epIDs, showid=None): def getEpList(ep_ids, showid=None):
if epIDs == None or len(epIDs) == 0: if None is ep_ids or 0 == len(ep_ids):
return [] return []
query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?'] * len(epIDs)),) query = 'SELECT * FROM tv_episodes WHERE indexerid in (%s)' % (','.join(['?'] * len(ep_ids)),)
params = epIDs params = ep_ids
if showid != None: if None is not showid:
query += " AND showid = ?" query += ' AND showid = ?'
params.append(showid) params.append(showid)
myDB = db.DBConnection() my_db = db.DBConnection()
sqlResults = myDB.select(query, params) sql_results = my_db.select(query, params)
epList = [] ep_list = []
for curEp in sqlResults: for curEp in sql_results:
curShowObj = helpers.findCertainShow(showList, int(curEp["showid"])) cur_show_obj = helpers.findCertainShow(showList, int(curEp['showid']))
curEpObj = curShowObj.getEpisode(int(curEp["season"]), int(curEp["episode"])) cur_ep_obj = cur_show_obj.getEpisode(int(curEp['season']), int(curEp['episode']))
epList.append(curEpObj) ep_list.append(cur_ep_obj)
return epList return ep_list
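Note: getEpList() still builds a parameterized IN (...) clause with one placeholder per id plus an optional showid filter; only the local names changed. A self-contained sketch of that query construction against an in-memory sqlite3 table (the reduced column set is illustrative, not the real tv_episodes schema):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes (indexerid INTEGER, showid INTEGER, season INTEGER, episode INTEGER)')
conn.executemany('INSERT INTO tv_episodes VALUES (?, ?, ?, ?)',
                 [(101, 1, 1, 1), (102, 1, 1, 2), (201, 2, 1, 1)])

ep_ids, showid = [101, 102, 201], 1
query = 'SELECT * FROM tv_episodes WHERE indexerid IN (%s)' % ','.join(['?'] * len(ep_ids))
params = list(ep_ids)
if None is not showid:
    query += ' AND showid = ?'
    params.append(showid)

rows = conn.execute(query, params).fetchall()
assert [101, 102] == sorted(r[0] for r in rows)  # episode 201 belongs to another show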



@ -168,7 +168,7 @@ class GenericClient(object):
try: try:
# Sets per provider seed ratio # Sets per provider seed ratio
result.ratio = result.provider.seedRatio() result.ratio = result.provider.seed_ratio()
result = self._get_torrent_hash(result) result = self._get_torrent_hash(result)


@ -721,7 +721,7 @@ class ConfigMigrator():
for curProvider in providers.sortedProviderList(): for curProvider in providers.sortedProviderList():
if hasattr(curProvider, 'enable_recentsearch'): if hasattr(curProvider, 'enable_recentsearch'):
curProvider.enable_recentsearch = bool(check_setting_int( curProvider.enable_recentsearch = bool(check_setting_int(
self.config_obj, curProvider.getID().upper(), curProvider.getID() + '_enable_dailysearch', 1)) self.config_obj, curProvider.get_id().upper(), curProvider.get_id() + '_enable_dailysearch', 1))
def _migrate_v7(self): def _migrate_v7(self):
sickbeard.EPISODE_VIEW_LAYOUT = check_setting_str(self.config_obj, 'GUI', 'coming_eps_layout', 'banner') sickbeard.EPISODE_VIEW_LAYOUT = check_setting_str(self.config_obj, 'GUI', 'coming_eps_layout', 'banner')


@ -100,10 +100,6 @@ class EpisodeNotFoundException(SickBeardException):
"The episode wasn't found on the Indexer" "The episode wasn't found on the Indexer"
class NewzbinAPIThrottled(SickBeardException):
"Newzbin has throttled us, deal with it"
class ShowDirNotFoundException(SickBeardException): class ShowDirNotFoundException(SickBeardException):
"The show dir doesn't exist" "The show dir doesn't exist"


@ -1400,7 +1400,7 @@ def check_port(host, port, timeout=1.0):
def clear_unused_providers(): def clear_unused_providers():
providers = [x.cache.providerID for x in sickbeard.providers.sortedProviderList() if x.isActive()] providers = [x.cache.providerID for x in sickbeard.providers.sortedProviderList() if x.is_active()]
if providers: if providers:
myDB = db.DBConnection('cache.db') myDB = db.DBConnection('cache.db')


@ -104,7 +104,7 @@ def sendNZB(nzb, proper=False):
else: else:
if nzb.resultType == "nzb": if nzb.resultType == "nzb":
genProvider = GenericProvider("") genProvider = GenericProvider("")
data = genProvider.getURL(nzb.url) data = genProvider.get_url(nzb.url)
if (data == None): if (data == None):
return False return False
nzbcontent64 = standard_b64encode(data) nzbcontent64 = standard_b64encode(data)
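Note: for raw-NZB mode the only change above is getURL → get_url; the downloaded NZB is still base64-encoded before being passed on to NZBGet. The encoding step in isolation, with a sample payload:

from base64 import standard_b64encode

nzb_data = '<?xml version="1.0"?><nzb></nzb>'  # sample stand-in for the fetched NZB
nzbcontent64 = standard_b64encode(nzb_data.encode('utf-8')).decode('ascii')
assert nzbcontent64.startswith('PD94bWwg')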


@ -66,14 +66,14 @@ def _getProperList():
# for each provider get a list of the # for each provider get a list of the
origThreadName = threading.currentThread().name origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()] providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
for curProvider in providers: for curProvider in providers:
threading.currentThread().name = origThreadName + ' :: [' + curProvider.name + ']' threading.currentThread().name = origThreadName + ' :: [' + curProvider.name + ']'
logger.log(u'Searching for any new PROPER releases from ' + curProvider.name) logger.log(u'Searching for any new PROPER releases from ' + curProvider.name)
try: try:
curPropers = curProvider.findPropers(search_date) curPropers = curProvider.find_propers(search_date)
except exceptions.AuthException as e: except exceptions.AuthException as e:
logger.log(u'Authentication error: ' + ex(e), logger.ERROR) logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
continue continue
@ -243,8 +243,9 @@ def _downloadPropers(properList):
epObj = showObj.getEpisode(curProper.season, curProper.episode) epObj = showObj.getEpisode(curProper.season, curProper.episode)
# make the result object # make the result object
result = curProper.provider.getResult([epObj]) result = curProper.provider.get_result([epObj], curProper.url)
result.url = curProper.url if None is result:
continue
result.name = curProper.name result.name = curProper.name
result.quality = curProper.quality result.quality = curProper.quality
result.version = curProper.version result.version = curProper.version
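Note: _downloadPropers now lets the provider build the whole result via get_result(episodes, url) and skips the proper when the provider hands back None, instead of assigning result.url after the fact. A stand-in sketch of that calling convention (the stub's None branch exists only to exercise the new guard; class and attribute names are illustrative):

class StubSearchResult(object):
    def __init__(self, episodes):
        self.episodes, self.provider, self.url = episodes, None, None

class StubProvider(object):
    def get_result(self, episodes, url):
        if not url:
            return None  # nothing usable, caller should skip this proper
        result = StubSearchResult(episodes)
        result.provider, result.url = self, url
        return result

provider = StubProvider()
result = provider.get_result(['s01e01'], 'http://example.com/some.proper.torrent')
if None is result:
    pass  # continue to the next proper, as in _downloadPropers
else:
    result.name, result.quality = 'Show.S01E01.PROPER', 1  # filled in afterwards as before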


@ -53,7 +53,7 @@ from sickbeard import logger
def sortedProviderList(): def sortedProviderList():
initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList
providerDict = dict(zip([x.getID() for x in initialList], initialList)) providerDict = dict(zip([x.get_id() for x in initialList], initialList))
newList = [] newList = []
@ -207,7 +207,7 @@ def getProviderModule(name):
def getProviderClass(id): def getProviderClass(id):
providerMatch = [x for x in providerMatch = [x for x in
sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if
x.getID() == id] x.get_id() == id]
if len(providerMatch) != 1: if len(providerMatch) != 1:
return None return None
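Note: sortedProviderList() now keys its de-duplication dict on get_id(). The dict(zip(...)) construction keeps exactly one provider per id, and a later duplicate silently replaces an earlier one; a tiny stand-in example:

class StubProvider(object):
    def __init__(self, prov_id):
        self._id = prov_id

    def get_id(self):
        return self._id

# providerList + newznabProviderList + torrentRssProviderList, flattened
initial_list = [StubProvider('btn'), StubProvider('alpharatio'), StubProvider('btn')]
provider_dict = dict(zip([x.get_id() for x in initial_list], initial_list))

assert 2 == len(provider_dict)                    # duplicate ids collapse
assert provider_dict['btn'] is initial_list[-1]   # the later entry wins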


@ -47,13 +47,13 @@ class AlphaRatioProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = AlphaRatioCache(self) self.cache = AlphaRatioCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'session' in self.session.cookies logged_in = lambda: 'session' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'keeplogged': '1', 'login': 'Login'} login_params = {'username': self.username, 'password': self.password, 'keeplogged': '1', 'login': 'Login'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -66,10 +66,10 @@ class AlphaRatioProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -82,7 +82,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
search_string = unidecode(search_string) search_string = unidecode(search_string)
search_url = self.urls['search'] % search_string search_url = self.urls['search'] % search_string
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -124,7 +124,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
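Note: the _doLogin → _do_login conversions in this and the following torrent providers share the same shape: a logged_in() lambda that inspects the session's cookie jar, an early return when already authenticated, then a credentialed POST. The cookie-check half in isolation, using requests (the cookie value is illustrative; AlphaRatio happens to look for a 'session' cookie):

import requests

session = requests.Session()
logged_in = lambda: 'session' in session.cookies

assert not logged_in()                 # fresh session: would fall through to the login POST
session.cookies.set('session', 'abc')  # what a successful login response would set
assert logged_in()                     # later calls return True immediately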


@ -16,73 +16,67 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import urllib
import datetime import datetime
import urllib
import generic from . import generic
from sickbeard import classes, show_name_helpers, logger, tvcache from sickbeard import classes, show_name_helpers, logger, tvcache
class animenzb(generic.NZBProvider): class AnimeNZBProvider(generic.NZBProvider):
def __init__(self): def __init__(self):
generic.NZBProvider.__init__(self, 'animenzb', True, True) generic.NZBProvider.__init__(self, 'AnimeNZB', anime_only=True)
self.cache = animenzbCache(self)
self.url = 'http://animenzb.com/' self.url = 'http://animenzb.com/'
self.cache = AnimeNZBCache(self)
def _get_season_search_strings(self, ep_obj): def _get_season_search_strings(self, ep_obj):
return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)] return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
def _get_episode_search_strings(self, ep_obj, add_string=''): def _get_episode_search_strings(self, ep_obj, add_string=''):
return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)] return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_string, search_mode='eponly', epcount=0, age=0):
if self.show and not self.show.is_anime:
logger.log(u'%s is not an anime skipping ...' % self.show.name)
return []
params = { results = []
'cat': 'anime', if self.show and not self.show.is_anime:
'q': search_string.encode('utf-8'), return results
'max': '100'
} params = {'cat': 'anime',
'q': search_string.encode('utf-8'),
'max': '100'}
search_url = self.url + 'rss?' + urllib.urlencode(params) search_url = self.url + 'rss?' + urllib.urlencode(params)
logger.log(u'Search url: %s' % search_url, logger.DEBUG) logger.log(u'Search url: %s' % search_url, logger.DEBUG)
data = self.cache.getRSSFeed(search_url) data = self.cache.getRSSFeed(search_url)
if not data: if data and 'entries' in data:
return []
if 'entries' in data:
items = data.entries items = data.entries
results = []
for curItem in items: for curItem in items:
(title, url) = self._get_title_and_url(curItem) (title, url) = self._get_title_and_url(curItem)
if title and url: if title and url:
results.append(curItem) results.append(curItem)
else: else:
logger.log( logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
u'The data returned from %s is incomplete, this result is unusable' % self.name, logger.DEBUG)
logger.DEBUG)
return results return results
return [] def find_propers(self, date=None):
def findPropers(self, date=None):
results = [] results = []
for item in self._do_search('v2|v3|v4|v5'):
for item in self._doSearch('v2|v3|v4|v5'):
(title, url) = self._get_title_and_url(item) (title, url) = self._get_title_and_url(item)
if item.has_key('published_parsed') and item['published_parsed']: if 'published_parsed' in item and item['published_parsed']:
result_date = item.published_parsed result_date = item.published_parsed
if result_date: if result_date:
result_date = datetime.datetime(*result_date[0:6]) result_date = datetime.datetime(*result_date[0:6])
@ -97,32 +91,25 @@ class animenzb(generic.NZBProvider):
return results return results
class animenzbCache(tvcache.TVCache): class AnimeNZBCache(tvcache.TVCache):
def __init__(self, provider): def __init__(self, this_provider):
tvcache.TVCache.__init__(self, this_provider)
tvcache.TVCache.__init__(self, provider) self.minTime = 20 # cache update frequency
self.minTime = 20
def _getRSSData(self): def _getRSSData(self):
params = { params = {'cat': 'anime'.encode('utf-8'),
'cat': 'anime'.encode('utf-8'), 'max': '100'.encode('utf-8')}
'max': '100'.encode('utf-8')
}
rss_url = self.provider.url + 'rss?' + urllib.urlencode(params) rss_url = self.provider.url + 'rss?' + urllib.urlencode(params)
logger.log(u'%s cache update URL: %s' % (self.provider.name, rss_url), logger.DEBUG) logger.log(u'%s cache update URL: %s' % (self.provider.name, rss_url), logger.DEBUG)
data = self.getRSSFeed(rss_url) data = self.getRSSFeed(rss_url)
if data and 'entries' in data: if data and 'entries' in data:
return data.entries return data.entries
else: return []
return []
provider = animenzb() provider = AnimeNZBProvider()
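Note: AnimeNZBProvider still builds its search URL by url-encoding the cat/q/max parameters onto the provider's rss endpoint. A sketch of the resulting URL (the urlencode import is written to run on Python 2, which the project targets, as well as Python 3):

try:
    from urllib import urlencode        # Python 2, as in the provider module
except ImportError:
    from urllib.parse import urlencode  # Python 3

base_url = 'http://animenzb.com/'
params = {'cat': 'anime', 'q': u'Some Anime Title'.encode('utf-8'), 'max': '100'}
search_url = base_url + 'rss?' + urlencode(params)

# dict ordering of the query string can vary; the individual pairs are what matter
assert 'cat=anime' in search_url and 'max=100' in search_url and 'q=Some+Anime+Title' in search_url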


@ -15,6 +15,7 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import datetime import datetime
import time import time
@ -52,10 +53,10 @@ class BeyondHDProvider(generic.TorrentProvider):
logger.log(u'Incorrect authentication credentials for %s : %s' % (self.name, data_json['error']), logger.DEBUG) logger.log(u'Incorrect authentication credentials for %s : %s' % (self.name, data_json['error']), logger.DEBUG)
raise AuthException('Authentication credentials for %s are incorrect, check your config' % self.name) raise AuthException('Authentication credentials for %s are incorrect, check your config' % self.name)
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._checkAuth(): if not self._check_auth():
return results return results
for mode in search_params.keys(): for mode in search_params.keys():
@ -72,9 +73,9 @@ class BeyondHDProvider(generic.TorrentProvider):
search_url = self.urls['cache'] % (self.passkey, self.categories[mode]) search_url = self.urls['cache'] % (self.passkey, self.categories[mode])
if 'Cache' != mode: if 'Cache' != mode:
search_url += self.urls['search'] % search_string search_url += self.urls['search'] % re.sub('[\.\s]+', ' ', search_string)
data_json = self.getURL(search_url, json=True) data_json = self.get_url(search_url, json=True)
cnt = len(results) cnt = len(results)
if data_json and 'results' in data_json and self._check_auth_from_data(data_json): if data_json and 'results' in data_json and self._check_auth_from_data(data_json):
@ -97,7 +98,7 @@ class BeyondHDProvider(generic.TorrentProvider):
return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, scene=False, use_or=False) return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, scene=False, use_or=False)
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date, ['proper', 'repack']) return self._find_propers(search_date, ['proper', 'repack'])
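Note: besides the snake_case renames, BeyondHD now collapses runs of dots and whitespace in the search string before substituting it into the search URL. The substitution on its own, with an illustrative input:

import re

search_string = 'Some.Show S01E02  REPACK'
assert 'Some Show S01E02 REPACK' == re.sub(r'[\.\s]+', ' ', search_string)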


@ -43,13 +43,13 @@ class BitSoupProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = BitSoupCache(self) self.cache = BitSoupCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'ssl': 'yes'} login_params = {'username': self.username, 'password': self.password, 'ssl': 'yes'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -62,10 +62,10 @@ class BitSoupProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -78,7 +78,7 @@ class BitSoupProvider(generic.TorrentProvider):
search_string = unidecode(search_string) search_string = unidecode(search_string)
search_url = self.urls['search'] % (search_string, self.categories) search_url = self.urls['search'] % (search_string, self.categories)
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -121,7 +121,7 @@ class BitSoupProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)


@ -43,7 +43,7 @@ class BTNProvider(generic.TorrentProvider):
def _check_auth_from_data(self, data_json): def _check_auth_from_data(self, data_json):
if data_json is None: if data_json is None:
return self._checkAuth() return self._check_auth()
if 'api-error' not in data_json: if 'api-error' not in data_json:
return True return True
@ -52,9 +52,9 @@ class BTNProvider(generic.TorrentProvider):
logger.DEBUG) logger.DEBUG)
raise AuthException('Your authentication credentials for %s are incorrect, check your config.' % self.name) raise AuthException('Your authentication credentials for %s are incorrect, check your config.' % self.name)
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
self._checkAuth() self._check_auth()
params = {} params = {}
@ -236,14 +236,14 @@ class BTNProvider(generic.TorrentProvider):
return to_return return to_return
def findPropers(self, search_date=None): def find_propers(self, search_date=None):
results = [] results = []
search_terms = ['%.proper.%', '%.repack.%'] search_terms = ['%.proper.%', '%.repack.%']
for term in search_terms: for term in search_terms:
for item in self._doSearch({'release': term}, age=4 * 24 * 60 * 60): for item in self._do_search({'release': term}, age=4 * 24 * 60 * 60):
if item['Time']: if item['Time']:
try: try:
result_date = datetime.datetime.fromtimestamp(float(item['Time'])) result_date = datetime.datetime.fromtimestamp(float(item['Time']))
@ -273,7 +273,7 @@ class BTNProvider(generic.TorrentProvider):
% self.name, logger.WARNING) % self.name, logger.WARNING)
seconds_since_last_update = 86400 seconds_since_last_update = 86400
return self._doSearch(search_params=None, age=seconds_since_last_update) return self._do_search(search_params=None, age=seconds_since_last_update)
class BTNCache(tvcache.TVCache): class BTNCache(tvcache.TVCache):


@ -42,13 +42,13 @@ class FreshOnTVProvider(generic.TorrentProvider):
self.freeleech = False self.freeleech = False
self.cache = FreshOnTVCache(self) self.cache = FreshOnTVCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'login': 'Do it!'} login_params = {'username': self.username, 'password': self.password, 'login': 'Do it!'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -64,10 +64,10 @@ class FreshOnTVProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -83,7 +83,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
# returns top 15 results by default, expandable in user profile to 100 # returns top 15 results by default, expandable in user profile to 100
search_url = self.urls['search'] % (freeleech, search_string) search_url = self.urls['search'] % (freeleech, search_string)
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -129,7 +129,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)


@ -28,6 +28,7 @@ from base64 import b16encode, b32decode
import sickbeard import sickbeard
import requests import requests
import requests.cookies
from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from sickbeard.exceptions import SickBeardException, AuthException, ex from sickbeard.exceptions import SickBeardException, AuthException, ex
@ -42,8 +43,8 @@ class HaltParseException(SickBeardException):
class GenericProvider: class GenericProvider:
NZB = "nzb" NZB = 'nzb'
TORRENT = "torrent" TORRENT = 'torrent'
def __init__(self, name, supports_backlog=False, anime_only=False): def __init__(self, name, supports_backlog=False, anime_only=False):
# these need to be set in the subclass # these need to be set in the subclass
@ -67,85 +68,86 @@ class GenericProvider:
self.headers = { self.headers = {
# Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases, # Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
#otherwise session might be broken and download fail, asking again for authentication # otherwise session might be broken and download fail, asking again for authentication
#'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'} # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
'User-Agent': USER_AGENT} 'User-Agent': USER_AGENT}
def getID(self): def get_id(self):
return GenericProvider.makeID(self.name) return GenericProvider.make_id(self.name)
@staticmethod @staticmethod
def makeID(name): def make_id(name):
return re.sub("[^\w\d_]", "_", name.strip().lower()) return re.sub('[^\w\d_]', '_', name.strip().lower())
def imageName(self, *default_name): def image_name(self, *default_name):
for name in ['%s.%s' % (self.getID(), image_ext) for image_ext in ['png', 'gif', 'jpg']]: for name in ['%s.%s' % (self.get_id(), image_ext) for image_ext in ['png', 'gif', 'jpg']]:
if ek.ek(os.path.isfile, if ek.ek(os.path.isfile,
ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', name)): ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', name)):
return name return name
return '%s.png' % ('newznab', default_name[0])[any(default_name)] return '%s.png' % ('newznab', default_name[0])[any(default_name)]
def _checkAuth(self): def _check_auth(self):
return True return True
def _doLogin(self): def _do_login(self):
return True return True
def isActive(self): def is_active(self):
if self.providerType == GenericProvider.NZB and sickbeard.USE_NZBS: if GenericProvider.NZB == self.providerType and sickbeard.USE_NZBS:
return self.isEnabled() return self.is_enabled()
elif self.providerType == GenericProvider.TORRENT and sickbeard.USE_TORRENTS: elif GenericProvider.TORRENT == self.providerType and sickbeard.USE_TORRENTS:
return self.isEnabled() return self.is_enabled()
else: else:
return False return False
def isEnabled(self): def is_enabled(self):
""" """
This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER
""" """
return self.enabled return self.enabled
def getResult(self, episodes): def get_result(self, episodes, url):
""" """
Returns a result of the correct type for this provider Returns a result of the correct type for this provider
""" """
if self.providerType == GenericProvider.NZB: if GenericProvider.NZB == self.providerType:
result = classes.NZBSearchResult(episodes) result = classes.NZBSearchResult(episodes)
elif self.providerType == GenericProvider.TORRENT: elif GenericProvider.TORRENT == self.providerType:
result = classes.TorrentSearchResult(episodes) result = classes.TorrentSearchResult(episodes)
else: else:
result = classes.SearchResult(episodes) result = classes.SearchResult(episodes)
result.provider = self result.provider = self
result.url = url
return result return result
def getURL(self, url, post_data=None, params=None, timeout=30, json=False): def get_url(self, url, post_data=None, params=None, timeout=30, json=False):
""" """
By default this is just a simple urlopen call but this method should be overridden By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies) for providers with special URL requirements (like cookies)
""" """
# check for auth # check for auth
if not self._doLogin(): if not self._do_login():
return return
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json) session=self.session, json=json)
def downloadResult(self, result): def download_result(self, result):
""" """
Save the result to disk. Save the result to disk.
""" """
# check for auth # check for auth
if not self._doLogin(): if not self._do_login():
return False return False
if self.providerType == GenericProvider.TORRENT: if GenericProvider.TORRENT == self.providerType:
try: try:
torrent_hash = re.findall('urn:btih:([0-9a-f]{32,40})', result.url)[0].upper() torrent_hash = re.findall('urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()
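Note: the getID/makeID pair in the hunk above becomes get_id/make_id, and the id is still the provider name lower-cased with anything outside word characters squashed to underscores. A mirror of that helper with sample names (the names are only examples):

import re

def make_id(name):
    # mirrors GenericProvider.make_id from the hunk above
    return re.sub(r'[^\w\d_]', '_', name.strip().lower())

assert 'alpharatio' == make_id('AlphaRatio')
assert 'beyond_hd' == make_id(' Beyond-HD ')
assert 'btn__api_' == make_id('BTN (API)')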
@ -153,7 +155,7 @@ class GenericProvider:
torrent_hash = b16encode(b32decode(torrent_hash)).lower() torrent_hash = b16encode(b32decode(torrent_hash)).lower()
if not torrent_hash: if not torrent_hash:
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
return False return False
urls = ['https://%s/%s.torrent' % (u, torrent_hash) urls = ['https://%s/%s.torrent' % (u, torrent_hash)
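Note: the hunk above shows download_result's hash handling: a urn:btih value is pulled from the result URL and, as the context line suggests, presumably re-encoded from its 32-character base32 form to the usual 40-character hex form before the .torrent URLs are built. The conversion on its own, with a syntactically valid sample rather than a real info-hash:

from base64 import b16encode, b32decode

torrent_hash = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'  # sample 32-char base32 string

if 32 == len(torrent_hash):
    # re-encode base32 -> hex; the provider code lower-cases the result the same way
    torrent_hash = b16encode(b32decode(torrent_hash)).lower()

assert 40 == len(torrent_hash)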
@ -163,7 +165,7 @@ class GenericProvider:
filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR, filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
helpers.sanitizeFileName(result.name) + '.' + self.providerType) helpers.sanitizeFileName(result.name) + '.' + self.providerType)
elif self.providerType == GenericProvider.NZB: elif GenericProvider.NZB == self.providerType:
urls = [result.url] urls = [result.url]
filename = ek.ek(os.path.join, sickbeard.NZB_DIR, filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
@ -173,19 +175,19 @@ class GenericProvider:
for url in urls: for url in urls:
if helpers.download_file(url, filename, session=self.session): if helpers.download_file(url, filename, session=self.session):
logger.log(u"Downloading a result from " + self.name + " at " + url) logger.log(u'Downloading a result from ' + self.name + ' at ' + url)
if self.providerType == GenericProvider.TORRENT: if GenericProvider.TORRENT == self.providerType:
logger.log(u"Saved magnet link to " + filename, logger.MESSAGE) logger.log(u'Saved magnet link to ' + filename, logger.MESSAGE)
else: else:
logger.log(u"Saved result to " + filename, logger.MESSAGE) logger.log(u'Saved result to ' + filename, logger.MESSAGE)
if self._verify_download(filename): if self._verify_download(filename):
return True return True
elif ek.ek(os.path.isfile, filename): elif ek.ek(os.path.isfile, filename):
ek.ek(os.remove, filename) ek.ek(os.remove, filename)
logger.log(u"Failed to download result", logger.ERROR) logger.log(u'Failed to download result', logger.ERROR)
return False return False
def _verify_download(self, file_name=None): def _verify_download(self, file_name=None):
@ -194,7 +196,7 @@ class GenericProvider:
""" """
# primitive verification of torrents, just make sure we didn't get a text file or something # primitive verification of torrents, just make sure we didn't get a text file or something
if self.providerType == GenericProvider.TORRENT: if GenericProvider.TORRENT == self.providerType:
parser = createParser(file_name) parser = createParser(file_name)
if parser: if parser:
mime_type = parser._getMimeType() mime_type = parser._getMimeType()
@ -202,18 +204,18 @@ class GenericProvider:
parser.stream._input.close() parser.stream._input.close()
except: except:
pass pass
if mime_type == 'application/x-bittorrent': if 'application/x-bittorrent' == mime_type:
return True return True
logger.log(u"Result is not a valid torrent file", logger.WARNING) logger.log(u'Result is not a valid torrent file', logger.WARNING)
return False return False
return True return True
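
_verify_download() above leans on hachoir to check the mime type of a downloaded torrent. A much cruder stand-in, not the project's method: every bencoded torrent begins with a bencoded dictionary, so the first byte being 'd' already rules out the usual failure mode of an HTML error page saved to disk:

def looks_like_torrent(file_name):
    # a bencoded torrent starts with 'd' (bencoded dict); an HTML error page does not
    try:
        with open(file_name, 'rb') as handle:
            return b'd' == handle.read(1)
    except (IOError, OSError):
        return False
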
def searchRSS(self, episodes): def search_rss(self, episodes):
return self.cache.findNeededEpisodes(episodes) return self.cache.findNeededEpisodes(episodes)
def getQuality(self, item, anime=False): def get_quality(self, item, anime=False):
""" """
Figures out the quality of the given RSS item node Figures out the quality of the given RSS item node
@ -225,13 +227,13 @@ class GenericProvider:
quality = Quality.sceneQuality(title, anime) quality = Quality.sceneQuality(title, anime)
return quality return quality
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
return [] return []
def _get_season_search_strings(self, episode): def _get_season_search_strings(self, episode):
return [] return []
def _get_episode_search_strings(self, eb_obj, add_string=''): def _get_episode_search_strings(self, *args, **kwargs):
return [] return []
def _get_title_and_url(self, item): def _get_title_and_url(self, item):
@ -259,223 +261,216 @@ class GenericProvider:
pass pass
if title: if title:
title = re.sub(r'\s+', '.', u'' + title) title = re.sub(r'\s+', '.', u'%s' % title)
if url: if url:
url = url.replace('&amp;', '&') url = str(url).replace('&amp;', '&')
return title, url return title, url
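
_get_title_and_url() above also normalises what the feeds return: whitespace in titles becomes dots and HTML-escaped ampersands in URLs are undone. The same clean-up as standalone helpers, for illustration:

import re

def clean_title(title):
    # collapse runs of whitespace into single dots, scene-name style
    return re.sub(r'\s+', '.', u'%s' % title)

def clean_url(url):
    # undo the &amp; escaping that feeds and HTML sources introduce
    return str(url).replace('&amp;', '&')

# e.g. clean_title('Show Name 720p') -> 'Show.Name.720p'
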
def findSearchResults(self, show, episodes, search_mode, manualSearch=False): def find_search_results(self, show, episodes, search_mode, manual_search=False):
self._checkAuth() self._check_auth()
self.show = show self.show = show
results = {} results = {}
itemList = [] item_list = []
searched_scene_season = None searched_scene_season = None
for epObj in episodes: for ep_obj in episodes:
# search cache for episode result # search cache for episode result
cacheResult = self.cache.searchCache(epObj, manualSearch) cache_result = self.cache.searchCache(ep_obj, manual_search)
if cacheResult: if cache_result:
if epObj.episode not in results: if ep_obj.episode not in results:
results[epObj.episode] = cacheResult results[ep_obj.episode] = cache_result
else: else:
results[epObj.episode].extend(cacheResult) results[ep_obj.episode].extend(cache_result)
# found result, search next episode # found result, search next episode
continue continue
# skip if season already searched # skip if season already searched
if len(episodes) > 1 and searched_scene_season == epObj.scene_season: if 1 < len(episodes) and ep_obj.scene_season == searched_scene_season:
continue continue
# mark season searched for season pack searches so we can skip later on # mark season searched for season pack searches so we can skip later on
searched_scene_season = epObj.scene_season searched_scene_season = ep_obj.scene_season
if 'sponly' == search_mode: if 'sponly' == search_mode:
# get season search results # get season search results
for curString in self._get_season_search_strings(epObj): for curString in self._get_season_search_strings(ep_obj):
itemList += self._doSearch(curString, search_mode, len(episodes)) item_list += self._do_search(curString, search_mode, len(episodes))
else: else:
# get single episode search results # get single episode search results
for curString in self._get_episode_search_strings(epObj): for curString in self._get_episode_search_strings(ep_obj):
itemList += self._doSearch(curString, 'eponly', len(episodes)) item_list += self._do_search(curString, 'eponly', len(episodes))
# if we found what we needed already from cache then return results and exit # if we found what we needed already from cache then return results and exit
if len(results) == len(episodes): if len(results) == len(episodes):
return results return results
# sort list by quality # sort list by quality
if len(itemList): if len(item_list):
items = {} items = {}
itemsUnknown = [] items_unknown = []
for item in itemList: for item in item_list:
quality = self.getQuality(item, anime=show.is_anime) quality = self.get_quality(item, anime=show.is_anime)
if quality == Quality.UNKNOWN: if Quality.UNKNOWN == quality:
itemsUnknown += [item] items_unknown += [item]
else: else:
if quality not in items: if quality not in items:
items[quality] = [item] items[quality] = [item]
else: else:
items[quality].append(item) items[quality].append(item)
itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)])) item_list = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
itemList += itemsUnknown if itemsUnknown else [] item_list += items_unknown if items_unknown else []
# filter results # filter results
cl = [] cl = []
for item in itemList: for item in item_list:
(title, url) = self._get_title_and_url(item) (title, url) = self._get_title_and_url(item)
# parse the file name # parse the file name
try: try:
myParser = NameParser(False, convert=True) parser = NameParser(False, convert=True)
parse_result = myParser.parse(title) parse_result = parser.parse(title)
except InvalidNameException: except InvalidNameException:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG) logger.log(u'Unable to parse the filename ' + title + ' into a valid episode', logger.DEBUG)
continue continue
except InvalidShowException: except InvalidShowException:
logger.log(u'No show name or scene exception matched the parsed filename ' + title, logger.DEBUG) logger.log(u'No show name or scene exception matched the parsed filename ' + title, logger.DEBUG)
continue continue
showObj = parse_result.show show_obj = parse_result.show
quality = parse_result.quality quality = parse_result.quality
release_group = parse_result.release_group release_group = parse_result.release_group
version = parse_result.version version = parse_result.version
addCacheEntry = False add_cache_entry = False
if not (showObj.air_by_date or showObj.sports): if not (show_obj.air_by_date or show_obj.sports):
if search_mode == 'sponly': if 'sponly' == search_mode:
if len(parse_result.episode_numbers): if len(parse_result.episode_numbers):
logger.log( logger.log(u'This is supposed to be a season pack search but the result ' + title
u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it", + u' is not a valid season pack, skipping it', logger.DEBUG)
logger.DEBUG) add_cache_entry = True
addCacheEntry = True if len(parse_result.episode_numbers)\
if len(parse_result.episode_numbers) and ( and (parse_result.season_number not in set([ep.season for ep in episodes])
parse_result.season_number not in set([ep.season for ep in episodes]) or not [ep for ep in episodes if or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
ep.scene_episode in parse_result.episode_numbers]): logger.log(u'The result ' + title + u' doesn\'t seem to be a valid episode that we are trying'
logger.log( + u' to snatch, ignoring', logger.DEBUG)
u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring", add_cache_entry = True
logger.DEBUG)
addCacheEntry = True
else: else:
if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in if not len(parse_result.episode_numbers)\
episodes if and parse_result.season_number\
ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: and not [ep for ep in episodes
logger.log( if ep.season == parse_result.season_number
u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring", and ep.episode in parse_result.episode_numbers]:
logger.DEBUG) logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying'
addCacheEntry = True + u' to snatch, ignoring', logger.DEBUG)
add_cache_entry = True
elif len(parse_result.episode_numbers) and not [ep for ep in episodes if elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
logger.log( logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying'
u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring", + u' to snatch, ignoring', logger.DEBUG)
logger.DEBUG) add_cache_entry = True
addCacheEntry = True
if not addCacheEntry: if not add_cache_entry:
# we just use the existing info for normal searches # we just use the existing info for normal searches
actual_season = parse_result.season_number actual_season = parse_result.season_number
actual_episodes = parse_result.episode_numbers actual_episodes = parse_result.episode_numbers
else: else:
if not (parse_result.is_air_by_date): if not parse_result.is_air_by_date:
logger.log( logger.log(u'This is supposed to be a date search but the result ' + title
u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it", + u' didn\'t parse as one, skipping it', logger.DEBUG)
logger.DEBUG) add_cache_entry = True
addCacheEntry = True
else: else:
airdate = parse_result.air_date.toordinal() airdate = parse_result.air_date.toordinal()
myDB = db.DBConnection() my_db = db.DBConnection()
sql_results = myDB.select( sql_results = my_db.select('SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show_obj.indexerid, airdate])
[showObj.indexerid, airdate])
if len(sql_results) != 1: if 1 != len(sql_results):
logger.log( logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t'
u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it", + u' give proper results, skipping it', logger.WARNING)
logger.WARNING) add_cache_entry = True
addCacheEntry = True
if not addCacheEntry: if not add_cache_entry:
actual_season = int(sql_results[0]["season"]) actual_season = int(sql_results[0]['season'])
actual_episodes = [int(sql_results[0]["episode"])] actual_episodes = [int(sql_results[0]['episode'])]
# add parsed result to cache for usage later on # add parsed result to cache for usage later on
if addCacheEntry: if add_cache_entry:
logger.log(u"Adding item from search to cache: " + title, logger.DEBUG) logger.log(u'Adding item from search to cache: ' + title, logger.DEBUG)
ci = self.cache._addCacheEntry(title, url, parse_result=parse_result) ci = self.cache.add_cache_entry(title, url, parse_result=parse_result)
if ci is not None: if None is not ci:
cl.append(ci) cl.append(ci)
continue continue
# make sure we want the episode # make sure we want the episode
wantEp = True want_ep = True
for epNo in actual_episodes: for epNo in actual_episodes:
if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch): if not show_obj.wantEpisode(actual_season, epNo, quality, manual_search):
wantEp = False want_ep = False
break break
if not wantEp: if not want_ep:
logger.log( logger.log(u'Ignoring result %s because we don\'t want an episode that is %s'
u"Ignoring result " + title + " because we don't want an episode that is " + % (title, Quality.qualityStrings[quality]), logger.DEBUG)
Quality.qualityStrings[
quality], logger.DEBUG)
continue continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG) logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)
# make a result object # make a result object
epObj = [] ep_obj = []
for curEp in actual_episodes: for curEp in actual_episodes:
epObj.append(showObj.getEpisode(actual_season, curEp)) ep_obj.append(show_obj.getEpisode(actual_season, curEp))
result = self.getResult(epObj) result = self.get_result(ep_obj, url)
result.show = showObj if None is result:
result.url = url continue
result.show = show_obj
result.name = title result.name = title
result.quality = quality result.quality = quality
result.release_group = release_group result.release_group = release_group
result.content = None result.content = None
result.version = version result.version = version
if len(epObj) == 1: if 1 == len(ep_obj):
epNum = epObj[0].episode ep_num = ep_obj[0].episode
logger.log(u"Single episode result.", logger.DEBUG) logger.log(u'Single episode result.', logger.DEBUG)
elif len(epObj) > 1: elif 1 < len(ep_obj):
epNum = MULTI_EP_RESULT ep_num = MULTI_EP_RESULT
logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str( logger.log(u'Separating multi-episode result to check for later - result contains episodes: '
parse_result.episode_numbers), logger.DEBUG) + str(parse_result.episode_numbers), logger.DEBUG)
elif len(epObj) == 0: elif 0 == len(ep_obj):
epNum = SEASON_RESULT ep_num = SEASON_RESULT
logger.log(u"Separating full season result to check for later", logger.DEBUG) logger.log(u'Separating full season result to check for later', logger.DEBUG)
if epNum not in results: if ep_num not in results:
results[epNum] = [result] results[ep_num] = [result]
else: else:
results[epNum].append(result) results[ep_num].append(result)
# check if we have items to add to cache # check if we have items to add to cache
if len(cl) > 0: if 0 < len(cl):
myDB = self.cache._getDB() my_db = self.cache.get_db()
myDB.mass_action(cl) my_db.mass_action(cl)
return results return results
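
The 'sort list by quality' step inside find_search_results() buckets items by detected quality, chains the buckets best-first and appends unknown-quality items last. A standalone sketch of that ordering (a plain `unknown` sentinel stands in for Quality.UNKNOWN):

import itertools

def order_by_quality(item_list, get_quality, unknown=0):
    # bucket items by quality, then chain the buckets from best to worst
    buckets, unknowns = {}, []
    for item in item_list:
        quality = get_quality(item)
        if unknown == quality:
            unknowns.append(item)
        else:
            buckets.setdefault(quality, []).append(item)
    ordered = list(itertools.chain(*[v for (k, v) in sorted(buckets.items(), reverse=True)]))
    return ordered + unknowns

# e.g. order_by_quality(['a', 'b', 'c'], {'a': 1, 'b': 4, 'c': 0}.get) -> ['b', 'a', 'c']
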
def findPropers(self, search_date=None): def find_propers(self, search_date=None):
results = self.cache.listPropers(search_date) results = self.cache.listPropers(search_date)
return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
results] results]
def seedRatio(self): def seed_ratio(self):
''' """
Provider should override this value if custom seed ratio enabled Provider should override this value if custom seed ratio enabled
It should return the value of the provider seed ratio It should return the value of the provider seed ratio
''' """
return '' return ''
@staticmethod @staticmethod
@ -489,6 +484,30 @@ class GenericProvider:
logger.log(u'%s in response from %s' % (('No %s items' % mode, logger.log(u'%s in response from %s' % (('No %s items' % mode,
'%s %s item%s' % (count, mode, maybe_plural(count)))[0 < count], url)) '%s %s item%s' % (count, mode, maybe_plural(count)))[0 < count], url))
def check_auth_cookie(self):
if hasattr(self, 'cookies'):
cookies = self.cookies
if not re.match('^(\w+=\w+[;\s]*)+$', cookies):
return False
cj = requests.utils.add_dict_to_cookiejar(self.session.cookies,
dict([x.strip().split('=') for x in cookies.split(';')
if x != ''])),
for item in cj:
if not isinstance(item, requests.cookies.RequestsCookieJar):
return False
return True
def _check_cookie(self):
if self.check_auth_cookie():
return True, None
            return False, 'Cookies not correctly formatted key=value pairs (e.g. uid=xx;pass=yy)'
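
check_auth_cookie() above accepts a provider cookie setting written as key=value pairs separated by semicolons and loads it into the session's cookie jar. A minimal standalone version of that parsing, using only requests:

import re
import requests

def load_cookie_setting(session, cookies):
    # cookies is the raw user setting, e.g. 'uid=xx; pass=yy'
    if not cookies or not re.match(r'^(\w+=\w+[;\s]*)+$', cookies):
        return False
    cookie_dict = dict(part.strip().split('=') for part in cookies.split(';') if part.strip())
    requests.utils.add_dict_to_cookiejar(session.cookies, cookie_dict)
    return True

# session = requests.Session(); load_cookie_setting(session, 'uid=12345; pass=abcdef')
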
class NZBProvider(GenericProvider): class NZBProvider(GenericProvider):
@ -497,9 +516,30 @@ class NZBProvider(GenericProvider):
self.providerType = GenericProvider.NZB self.providerType = GenericProvider.NZB
def imageName(self): def image_name(self):
return GenericProvider.imageName(self, 'newznab') return GenericProvider.image_name(self, 'newznab')
def maybe_apikey(self):
if hasattr(self, 'needs_auth'):
if hasattr(self, 'key') and 0 < len(self.key):
return self.key
if hasattr(self, 'api_key') and 0 < len(self.api_key):
return self.api_key
return None
return False
def _check_auth(self):
has_key = self.maybe_apikey()
if has_key:
return has_key
if None is has_key:
raise AuthException('%s for %s is empty in config provider options'
% ('API key' + ('', ' and/or Username')[hasattr(self, 'username')], self.name))
return GenericProvider._check_auth(self)
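
maybe_apikey() gives _check_auth() a three-way answer: the key itself when one of key/api_key is set, None when auth is needed but the field is empty, and False when the provider declares no auth at all. A condensed sketch of that contract (getattr checks stand in for the hasattr tests used above):

def resolve_api_key(provider):
    # the key itself, None (auth needed, key missing) or False (no auth needed)
    if getattr(provider, 'needs_auth', False):
        for attr in ('key', 'api_key'):
            value = getattr(provider, attr, None)
            if value:
                return value
        return None
    return False

Callers can then raise on None and simply carry on for False, as _check_auth() does above.
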
def _find_propers(self, search_date=None): def _find_propers(self, search_date=None):
@ -508,7 +548,7 @@ class NZBProvider(GenericProvider):
cache_results] cache_results]
index = 0 index = 0
alt_search = ('nzbs_org' == self.getID()) alt_search = ('nzbs_org' == self.get_id())
term_items_found = False term_items_found = False
do_search_alt = False do_search_alt = False
@ -534,7 +574,7 @@ class NZBProvider(GenericProvider):
else: else:
index += 1 index += 1
for item in self._doSearch(search_params, age=4): for item in self._do_search(search_params, age=4):
(title, url) = self._get_title_and_url(item) (title, url) = self._get_title_and_url(item)
@ -549,7 +589,7 @@ class NZBProvider(GenericProvider):
logger.log(u'Unable to figure out the date for entry %s, skipping it' % title) logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
continue continue
if not search_date or result_date > search_date: if not search_date or search_date < result_date:
search_result = classes.Proper(title, url, result_date, self.show) search_result = classes.Proper(title, url, result_date, self.show)
results.append(search_result) results.append(search_result)
term_items_found = True term_items_found = True
@ -569,15 +609,15 @@ class TorrentProvider(GenericProvider):
self._seed_ratio = None self._seed_ratio = None
def imageName(self): def image_name(self):
return GenericProvider.imageName(self, 'torrent') return GenericProvider.image_name(self, 'torrent')
def seedRatio(self): def seed_ratio(self):
return self._seed_ratio return self._seed_ratio
def getQuality(self, item, anime=False): def get_quality(self, item, anime=False):
if isinstance(item, tuple): if isinstance(item, tuple):
name = item[0] name = item[0]
@ -655,7 +695,7 @@ class TorrentProvider(GenericProvider):
search_params += [crop.sub(r'\1', '%s %s' % (name, detail) + ('', ' ' + x)[any(x)]) for x in append] search_params += [crop.sub(r'\1', '%s %s' % (name, detail) + ('', ' ' + x)[any(x)]) for x in append]
return search_params return search_params
def _checkAuth(self): def _check_auth(self):
if hasattr(self, 'username') and hasattr(self, 'password'): if hasattr(self, 'username') and hasattr(self, 'password'):
if self.username and self.password: if self.username and self.password:
@ -674,7 +714,7 @@ class TorrentProvider(GenericProvider):
return True return True
setting = 'Passkey' setting = 'Passkey'
else: else:
return GenericProvider._checkAuth(self) return GenericProvider._check_auth(self)
raise AuthException('%s for %s is empty in config provider options' % (setting, self.name)) raise AuthException('%s for %s is empty in config provider options' % (setting, self.name))
@ -720,7 +760,7 @@ class TorrentProvider(GenericProvider):
proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term)) proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
search_string = self._get_episode_search_strings(cur_ep, add_string=proper_term) search_string = self._get_episode_search_strings(cur_ep, add_string=proper_term)
for item in self._doSearch(search_string[0]): for item in self._do_search(search_string[0]):
title, url = self._get_title_and_url(item) title, url = self._get_title_and_url(item)
if not proper_check.search(title): if not proper_check.search(title):
continue continue
@ -741,4 +781,4 @@ class TorrentProvider(GenericProvider):
def get_cache_data(self, *args, **kwargs): def get_cache_data(self, *args, **kwargs):
search_params = {'Cache': ['']} search_params = {'Cache': ['']}
return self._doSearch(search_params) return self._do_search(search_params)

View file

@ -44,13 +44,13 @@ class GFTrackerProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = GFTrackerCache(self) self.cache = GFTrackerCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'gft_uid' in self.session.cookies and 'gft_pass' in self.session.cookies logged_in = lambda: 'gft_uid' in self.session.cookies and 'gft_pass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
helpers.getURL(self.urls['login_get'], session=self.session) helpers.getURL(self.urls['login_get'], session=self.session)
login_params = {'username': self.username, 'password': self.password} login_params = {'username': self.username, 'password': self.password}
response = helpers.getURL(self.urls['login_post'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login_post'], post_data=login_params, session=self.session)
@ -61,16 +61,16 @@ class GFTrackerProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'details', 'get': 'download', rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'details', 'get': 'download',
'seeders': '(^\d+)', 'leechers': '(\d+)$'}.items()) 'seeders': r'(^\d+)', 'leechers': r'(\d+)$'}.items())
for mode in search_params.keys(): for mode in search_params.keys():
for search_string in search_params[mode]: for search_string in search_params[mode]:
@ -81,7 +81,7 @@ class GFTrackerProvider(generic.TorrentProvider):
if 'Cache' != mode: if 'Cache' != mode:
search_url += self.urls['search'] % search_string search_url += self.urls['search'] % search_string
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -126,7 +126,7 @@ class GFTrackerProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)

View file

@ -73,27 +73,27 @@ class HDBitsProvider(generic.TorrentProvider):
return title, url return title, url
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
self._checkAuth() self._check_auth()
logger.log(u'Search url: %s search_params: %s' % (self.urls['search'], search_params), logger.DEBUG) logger.log(u'Search url: %s search_params: %s' % (self.urls['search'], search_params), logger.DEBUG)
response_json = self.getURL(self.urls['search'], post_data=search_params, json=True) response_json = self.get_url(self.urls['search'], post_data=search_params, json=True)
if response_json and 'data' in response_json and self.check_auth_from_data(response_json): if response_json and 'data' in response_json and self.check_auth_from_data(response_json):
return response_json['data'] return response_json['data']
logger.log(u'Resulting JSON from %s isn\'t correct, not parsing it' % self.name, logger.ERROR) logger.log(u'Resulting JSON from %s isn\'t correct, not parsing it' % self.name, logger.ERROR)
return [] return []
def findPropers(self, search_date=None): def find_propers(self, search_date=None):
results = [] results = []
search_terms = [' proper ', ' repack '] search_terms = [' proper ', ' repack ']
for term in search_terms: for term in search_terms:
for item in self._doSearch(self._build_search_strings(search_term=term)): for item in self._do_search(self._build_search_strings(search_term=term)):
if item['utadded']: if item['utadded']:
try: try:
result_date = datetime.datetime.fromtimestamp(int(item['utadded'])) result_date = datetime.datetime.fromtimestamp(int(item['utadded']))
@ -142,9 +142,9 @@ class HDBitsProvider(generic.TorrentProvider):
def get_cache_data(self): def get_cache_data(self):
self._checkAuth() self._check_auth()
response_json = self.getURL(self.urls['search'], post_data=self._build_search_strings(), json=True) response_json = self.get_url(self.urls['search'], post_data=self._build_search_strings(), json=True)
if response_json and 'data' in response_json and self.check_auth_from_data(response_json): if response_json and 'data' in response_json and self.check_auth_from_data(response_json):
return response_json['data'] return response_json['data']

View file

@ -42,13 +42,13 @@ class IPTorrentsProvider(generic.TorrentProvider):
self.freeleech = False self.freeleech = False
self.cache = IPTorrentsCache(self) self.cache = IPTorrentsCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'login': 'submit'} login_params = {'username': self.username, 'password': self.password, 'login': 'submit'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -58,10 +58,10 @@ class IPTorrentsProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -76,7 +76,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
search_string = unidecode(search_string) search_string = unidecode(search_string)
search_url = '%s%s' % (self.urls['search'] % (freeleech, search_string), search_url = '%s%s' % (self.urls['search'] % (freeleech, search_string),
(';o=seeders', '')['Cache' == mode]) (';o=seeders', '')['Cache' == mode])
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -120,7 +120,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date, '') return self._find_propers(search_date, '')

View file

@ -55,7 +55,7 @@ class KATProvider(generic.TorrentProvider):
file_name = None file_name = None
data = self.getURL(torrent_link) data = self.get_url(torrent_link)
if not data: if not data:
return None return None
@ -144,7 +144,7 @@ class KATProvider(generic.TorrentProvider):
return [{'Episode': self._build_search_strings(ep_detail, append=(add_string, '')[self.show.anime])}] return [{'Episode': self._build_search_strings(ep_detail, append=(add_string, '')[self.show.anime])}]
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -216,7 +216,7 @@ class KATProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date, '') return self._find_propers(search_date, '')

View file

@ -44,13 +44,13 @@ class MoreThanProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = MoreThanCache(self) self.cache = MoreThanCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'session' in self.session.cookies logged_in = lambda: 'session' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'login': 'submit'} login_params = {'username': self.username, 'password': self.password, 'login': 'submit'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -63,10 +63,10 @@ class MoreThanProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -82,7 +82,7 @@ class MoreThanProvider(generic.TorrentProvider):
search_url = self.urls['search'] % search_string search_url = self.urls['search'] % search_string
# fetches 15 results by default, and up to 100 if allowed in user profile # fetches 15 results by default, and up to 100 if allowed in user profile
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
if not html or self._has_no_results(html): if not html or self._has_no_results(html):
@ -133,7 +133,7 @@ class MoreThanProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)

View file

@ -43,18 +43,10 @@ class NewznabProvider(generic.NZBProvider):
self.default = False self.default = False
self.cache = NewznabCache(self) self.cache = NewznabCache(self)
def _checkAuth(self):
if self.needs_auth and not self.key:
logger.log(u'Incorrect authentication credentials for %s : API key is missing' % self.name, logger.DEBUG)
raise AuthException('Your authentication credentials for %s are missing, check your config.' % self.name)
return True
def check_auth_from_data(self, data): def check_auth_from_data(self, data):
if data is None: if data is None:
return self._checkAuth() return self._check_auth()
if 'error' in data.feed: if 'error' in data.feed:
code = data.feed['error']['code'] code = data.feed['error']['code']
@ -84,22 +76,20 @@ class NewznabProvider(generic.NZBProvider):
""" """
return_categories = [] return_categories = []
self._checkAuth() api_key = self._check_auth()
params = {'t': 'caps'} params = {'t': 'caps'}
if self.needs_auth and self.key: if isinstance(api_key, basestring):
params['apikey'] = self.key params['apikey'] = api_key
try: categories = self.get_url('%s/api' % self.url, params=params, timeout=10)
categories = self.getURL('%s/api' % self.url, params=params, timeout=10) if not categories:
except: logger.log(u'Error getting html for [%s/api?%s]' %
logger.log(u'Error getting html for [%s]' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items())), logger.DEBUG)
('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))), logger.DEBUG)
return (False, return_categories, 'Error getting html for [%s]' % return (False, return_categories, 'Error getting html for [%s]' %
('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items())))) ('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))))
xml_categories = helpers.parse_xml(categories) xml_categories = helpers.parse_xml(categories)
if not xml_categories: if not xml_categories:
logger.log(u'Error parsing xml for [%s]' % self.name, logger.DEBUG) logger.log(u'Error parsing xml for [%s]' % self.name, logger.DEBUG)
return False, return_categories, 'Error parsing xml for [%s]' % self.name return False, return_categories, 'Error parsing xml for [%s]' % self.name
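
get_newznab_categories() above stops at parsing the caps XML in this hunk; the walk over the parsed tree is not shown. Under the public newznab caps convention (an assumption here, not code from this diff), <category> elements with <subcat> children sit below <categories>, so the extraction could be sketched as:

import xml.etree.ElementTree as etree

def parse_caps_categories(caps_xml):
    # returns [(id, name)] for every TV-ish category advertised by the indexer
    categories = []
    root = etree.fromstring(caps_xml)
    for cat in root.iter('category'):
        name = cat.get('name', '')
        if 'TV' in name.upper():
            categories.append((cat.get('id'), name))
            categories.extend((sub.get('id'), sub.get('name')) for sub in cat.findall('subcat'))
    return categories
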
@ -117,7 +107,7 @@ class NewznabProvider(generic.NZBProvider):
def config_str(self): def config_str(self):
return '%s|%s|%s|%s|%i|%s|%i|%i|%i' \ return '%s|%s|%s|%s|%i|%s|%i|%i|%i' \
% (self.name or '', self.url or '', self.key or '', self.cat_ids or '', self.enabled, % (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled,
self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog) self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog)
def _get_season_search_strings(self, ep_obj): def _get_season_search_strings(self, ep_obj):
@ -154,7 +144,7 @@ class NewznabProvider(generic.NZBProvider):
return to_return return to_return
def _get_episode_search_strings(self, ep_obj, add_string=''): def _get_episode_search_strings(self, ep_obj):
to_return = [] to_return = []
params = {} params = {}
@ -169,8 +159,7 @@ class NewznabProvider(generic.NZBProvider):
params['ep'] = '%i' % int( params['ep'] = '%i' % int(
ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode) ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode)
else: else:
params['season'] = ep_obj.scene_season params['season'], params['ep'] = ep_obj.scene_season, ep_obj.scene_episode
params['ep'] = ep_obj.scene_episode
# search # search
rid = helpers.mapIndexersToShow(ep_obj.show)[2] rid = helpers.mapIndexersToShow(ep_obj.show)[2]
@ -203,9 +192,9 @@ class NewznabProvider(generic.NZBProvider):
return to_return return to_return
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
self._checkAuth() api_key = self._check_auth()
if 'rid' not in search_params and 'q' not in search_params: if 'rid' not in search_params and 'q' not in search_params:
logger.log('Error no rid or search term given.') logger.log('Error no rid or search term given.')
@ -233,8 +222,8 @@ class NewznabProvider(generic.NZBProvider):
if search_params: if search_params:
params.update(search_params) params.update(search_params)
if self.needs_auth and self.key: if isinstance(api_key, basestring):
params['apikey'] = self.key params['apikey'] = api_key
results = [] results = []
offset = total = 0 offset = total = 0
@ -288,7 +277,7 @@ class NewznabProvider(generic.NZBProvider):
% (items, helpers.maybe_plural(items), params['limit']), logger.DEBUG) % (items, helpers.maybe_plural(items), params['limit']), logger.DEBUG)
return results return results
def findPropers(self, search_date=None): def find_propers(self, search_date=None):
return self._find_propers(search_date) return self._find_propers(search_date)
@ -305,8 +294,9 @@ class NewznabCache(tvcache.TVCache):
'cat': self.provider.cat_ids + ',5060,5070', 'cat': self.provider.cat_ids + ',5060,5070',
'attrs': 'rageid'} 'attrs': 'rageid'}
if self.provider.needs_auth and self.provider.key: has_apikey = self.provider.maybe_apikey()
params['apikey'] = self.provider.key if has_apikey:
params['apikey'] = has_apikey
rss_url = '%sapi?%s' % (self.provider.url, urllib.urlencode(params)) rss_url = '%sapi?%s' % (self.provider.url, urllib.urlencode(params))
@ -314,13 +304,14 @@ class NewznabCache(tvcache.TVCache):
return self.getRSSFeed(rss_url) return self.getRSSFeed(rss_url)
def _checkAuth(self, *data):
return self.provider.check_auth_from_data(data[0])
def updateCache(self): def updateCache(self):
if self.shouldUpdate() and self._checkAuth(None): if self.shouldUpdate():
try:
self._checkAuth()
except Exception:
return []
data = self._getRSSData() data = self._getRSSData()
# as long as the http request worked we count this as an update # as long as the http request worked we count this as an update
@ -332,7 +323,7 @@ class NewznabCache(tvcache.TVCache):
self.setLastUpdate() self.setLastUpdate()
if self._checkAuth(data): if self.provider.check_auth_from_data(data):
items = data.entries items = data.entries
cl = [] cl = []
for item in items: for item in items:
@ -341,7 +332,7 @@ class NewznabCache(tvcache.TVCache):
cl.append(ci) cl.append(ci)
if 0 < len(cl): if 0 < len(cl):
my_db = self._getDB() my_db = self.get_db()
my_db.mass_action(cl) my_db.mass_action(cl)
else: else:
@ -376,4 +367,4 @@ class NewznabCache(tvcache.TVCache):
url = self._translateLinkURL(url) url = self._translateLinkURL(url)
logger.log(u'Attempting to add item from RSS to cache: ' + title, logger.DEBUG) logger.log(u'Attempting to add item from RSS to cache: ' + title, logger.DEBUG)
return self._addCacheEntry(title, url, indexer_id=tvrageid) return self.add_cache_entry(title, url, indexer_id=tvrageid)
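
The NewznabCache request above adds the provider's categories plus 5060/5070 and an apikey only when maybe_apikey() returns one. A standalone sketch of that URL assembly; the 't=tvsearch' value and the host in the example are assumptions, since the hunk does not show them:

try:
    from urllib import urlencode          # Python 2, as the project uses
except ImportError:
    from urllib.parse import urlencode    # Python 3

def build_cache_rss_url(base_url, cat_ids, api_key=None):
    params = {'t': 'tvsearch',            # assumed search mode, not shown in this hunk
              'cat': cat_ids + ',5060,5070',
              'attrs': 'rageid'}
    if api_key:
        params['apikey'] = api_key
    return '%sapi?%s' % (base_url, urlencode(params))

# e.g. build_cache_rss_url('https://indexer.example/', '5030,5040', 'xxxx')
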

View file

@ -17,113 +17,82 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import urllib import urllib
import re
import generic from . import generic
from sickbeard import logger, tvcache, show_name_helpers from sickbeard import logger, tvcache, show_name_helpers
from sickbeard.common import Quality
class NyaaProvider(generic.TorrentProvider): class NyaaProvider(generic.TorrentProvider):
def __init__(self): def __init__(self):
generic.TorrentProvider.__init__(self, 'NyaaTorrents', True, True) generic.TorrentProvider.__init__(self, 'NyaaTorrents', anime_only=True)
self.ratio = None
self.cache = NyaaCache(self)
self.url = 'http://www.nyaa.se/' self.url = 'http://www.nyaa.se/'
def getQuality(self, item, anime=False): self.cache = NyaaCache(self)
title = item.title
quality = Quality.sceneQuality(title, anime)
return quality
def findSearchResults(self, show, episodes, search_mode, manualSearch=False): def _do_search(self, search_string, search_mode='eponly', epcount=0, age=0):
return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)
def _get_season_search_strings(self, ep_obj): results = []
return show_name_helpers.makeSceneShowSearchStrings(self.show)
def _get_episode_search_strings(self, ep_obj, add_string=''):
return self._get_season_search_strings(ep_obj)
def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0):
if self.show and not self.show.is_anime: if self.show and not self.show.is_anime:
logger.log(u"" + str(self.show.name) + " is not an anime skipping " + str(self.name)) return results
return []
params = { params = {'term': search_string.encode('utf-8'),
"term": search_string.encode('utf-8'), 'cats': '1_37', # Limit to English-translated Anime (for now)
"cats": '1_37', # Limit to English-translated Anime (for now) # 'sort': '2', # Sort Descending By Seeders
"sort": '2', # Sort Descending By Seeders }
}
searchURL = self.url + '?page=rss&' + urllib.urlencode(params) search_url = self.url + '?page=rss&' + urllib.urlencode(params)
logger.log(u"Search string: " + searchURL, logger.DEBUG) logger.log(u'Search string: ' + search_url, logger.DEBUG)
data = self.cache.getRSSFeed(searchURL) data = self.cache.getRSSFeed(search_url)
if not data: if data and 'entries' in data:
return []
if 'entries' in data:
items = data.entries items = data.entries
results = []
for curItem in items: for curItem in items:
(title, url) = self._get_title_and_url(curItem) title, url = self._get_title_and_url(curItem)
if title and url: if title and url:
results.append(curItem) results.append(curItem)
else: else:
logger.log( logger.log(u'The data returned from ' + self.name + ' is incomplete, this result is unusable',
u"The data returned from " + self.name + " is incomplete, this result is unusable", logger.DEBUG)
logger.DEBUG)
return results return results
return [] def find_search_results(self, show, episodes, search_mode, manual_search=False):
def _get_title_and_url(self, item): return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)
return generic.TorrentProvider._get_title_and_url(self, item) def _get_season_search_strings(self, ep_obj, **kwargs):
def _extract_name_from_filename(self, filename): return show_name_helpers.makeSceneShowSearchStrings(self.show)
name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
logger.log(u"Comparing " + name_regex + " against " + filename, logger.DEBUG)
match = re.match(name_regex, filename, re.I)
if match:
return match.group(1)
return None
def seedRatio(self): def _get_episode_search_strings(self, ep_obj, **kwargs):
return self.ratio
return self._get_season_search_strings(ep_obj)
class NyaaCache(tvcache.TVCache): class NyaaCache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# only poll NyaaTorrents every 15 minutes max def __init__(self, this_provider):
self.minTime = 15 tvcache.TVCache.__init__(self, this_provider)
self.minTime = 15 # cache update frequency
def _getRSSData(self): def _getRSSData(self):
params = { params = {'page': 'rss', # Use RSS page
"page": 'rss', # Use RSS page 'order': '1', # Sort Descending By Date
"order": '1', # Sort Descending By Date 'cats': '1_37'} # Limit to English-translated Anime (for now)
"cats": '1_37', # Limit to English-translated Anime (for now)
}
url = self.provider.url + '?' + urllib.urlencode(params) url = self.provider.url + '?' + urllib.urlencode(params)
logger.log(u'NyaaTorrents cache update URL: ' + url, logger.DEBUG)
logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
data = self.getRSSFeed(url) data = self.getRSSFeed(url)
if data and 'entries' in data: if data and 'entries' in data:
return data.entries return data.entries
else: return []
return []
provider = NyaaProvider() provider = NyaaProvider()

View file

@ -16,117 +16,202 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import urllib import re
from datetime import datetime from datetime import datetime
import time
import traceback
import generic import generic
import sickbeard import sickbeard
import urllib
from sickbeard import tvcache, classes, logger, show_name_helpers from sickbeard import tvcache, classes, logger, show_name_helpers
from sickbeard.exceptions import AuthException from sickbeard.exceptions import AuthException
from sickbeard.rssfeeds import RSSFeeds
from sickbeard.bs4_parser import BS4Parser
try:
import xml.etree.cElementTree as etree
except ImportError:
import elementtree.ElementTree as etree
try:
import json
except ImportError:
from lib import simplejson as json
class OmgwtfnzbsProvider(generic.NZBProvider): class OmgwtfnzbsProvider(generic.NZBProvider):
def __init__(self): def __init__(self):
generic.NZBProvider.__init__(self, 'omgwtfnzbs', True, False) generic.NZBProvider.__init__(self, 'omgwtfnzbs')
self.username = None
self.api_key = None
self.cache = OmgwtfnzbsCache(self)
self.url = 'https://omgwtfnzbs.org/' self.url = 'https://omgwtfnzbs.org/'
def _checkAuth(self): self.url_base = 'https://omgwtfnzbs.org/'
self.urls = {'config_provider_home_uri': self.url_base,
'cache': 'https://rss.omgwtfnzbs.org/rss-download.php?%s',
'search': self.url_base + 'json/?%s',
'get': self.url_base + '%s',
'cache_html': self.url_base + 'browse.php?cat=tv%s',
'search_html': self.url_base + 'browse.php?cat=tv&search=%s'}
if not self.username or not self.api_key: self.url = self.urls['config_provider_home_uri']
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
return True self.needs_auth = True
self.username, self.api_key, self.cookies = 3 * [None]
self.cache = OmgwtfnzbsCache(self)
def _checkAuthFromData(self, parsed_data, is_XML=True): def _check_auth_from_data(self, parsed_data, is_xml=True):
if parsed_data is None: if parsed_data is None:
return self._checkAuth() return self._check_auth()
if is_XML: if is_xml:
# provider doesn't return xml on error # provider doesn't return xml on error
return True return True
else: else:
parsedJSON = parsed_data data_json = parsed_data
if 'notice' in parsedJSON: if 'notice' in data_json:
description_text = parsedJSON.get('notice') description_text = data_json.get('notice')
if 'information is incorrect' in parsedJSON.get('notice'): if 'information is incorrect' in data_json.get('notice'):
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text), logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + str(description_text),
logger.DEBUG) logger.DEBUG)
raise AuthException( raise AuthException(
"Your authentication credentials for " + self.name + " are incorrect, check your config.") 'Your authentication credentials for ' + self.name + ' are incorrect, check your config.')
elif '0 results matched your terms' in parsedJSON.get('notice'): elif '0 results matched your terms' in data_json.get('notice'):
return True return True
else: else:
logger.log(u"Unknown error given from " + self.name + " : " + str(description_text), logger.DEBUG) logger.log(u'Unknown error given from ' + self.name + ' : ' + str(description_text), logger.DEBUG)
return False return False
return True return True
def _get_season_search_strings(self, ep_obj): def _get_season_search_strings(self, ep_obj):
return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)] return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
def _get_episode_search_strings(self, ep_obj, add_string=''): def _get_episode_search_strings(self, ep_obj):
return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)] return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
def _get_title_and_url(self, item): def _get_title_and_url(self, item):
return (item['release'], item['getnzb'])
def _doSearch(self, search, search_mode='eponly', epcount=0, retention=0): return item['release'], item['getnzb']
self._checkAuth() def get_result(self, episodes, url):
params = {'user': self.username, result = None
'api': self.api_key, if url and False is self._init_api():
'eng': 1, data = self.get_url(url)
'catid': '19,20', # SD,HD if data:
'retention': sickbeard.USENET_RETENTION, if '</nzb>' not in data or 'seem to be logged in' in data:
'search': search} logger.log(u'Failed nzb data response: %s' % data, logger.DEBUG)
return result
result = classes.NZBDataSearchResult(episodes)
result.extraInfo += [data]
if retention or not params['retention']: if None is result:
params['retention'] = retention result = classes.NZBSearchResult(episodes)
result.url = url
search_url = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params) result.provider = self
logger.log(u"Search url: " + search_url, logger.DEBUG)
parsedJSON = self.getURL(search_url, json=True) return result
if not parsedJSON:
return []
if self._checkAuthFromData(parsedJSON, is_XML=False): def get_cache_data(self):
results = []
for item in parsedJSON: api_key = self._init_api()
if 'release' in item and 'getnzb' in item: if False is api_key:
results.append(item) return self.search_html()
if None is not api_key:
params = {'user': self.username,
'api': api_key,
'eng': 1,
'catid': '19,20'} # SD,HD
return results rss_url = self.urls['cache'] % urllib.urlencode(params)
logger.log(self.name + u' cache update URL: ' + rss_url, logger.DEBUG)
data = RSSFeeds(self).get_feed(rss_url)
if data and 'entries' in data:
return data.entries
return [] return []
def findPropers(self, search_date=None): def _do_search(self, search, search_mode='eponly', epcount=0, retention=0):
api_key = self._init_api()
if False is api_key:
return self.search_html(search)
results = []
if None is not api_key:
params = {'user': self.username,
'api': api_key,
'eng': 1,
'catid': '19,20', # SD,HD
'retention': (sickbeard.USENET_RETENTION, retention)[retention or not sickbeard.USENET_RETENTION],
'search': search}
search_url = self.urls['search'] % urllib.urlencode(params)
logger.log(u'Search url: ' + search_url, logger.DEBUG)
data_json = self.get_url(search_url, json=True)
if data_json and self._check_auth_from_data(data_json, is_xml=False):
for item in data_json:
if 'release' in item and 'getnzb' in item:
results.append(item)
return results
def search_html(self, search=''):
results = []
if None is self.cookies:
return results
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': r'send\?', 'nuked': r'\bnuked',
'cat': 'cat=(?:19|20)'}.items())
mode = ('search', 'cache')['' == search]
search_url = self.urls[mode + '_html'] % search
html = self.get_url(search_url)
cnt = len(results)
try:
if not html:
raise generic.HaltParseException
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
torrent_table = soup.find('table', attrs={'id': 'table_table'})
torrent_rows = []
if torrent_table:
torrent_rows = torrent_table.find('tbody').find_all('tr')
if 1 > len(torrent_rows):
raise generic.HaltParseException
for tr in torrent_rows:
try:
if tr.find('img', src=rc['nuked']) or not tr.find('a', href=rc['cat']):
continue
title = tr.find('a', href=rc['info'])['title']
download_url = tr.find('a', href=rc['get'])
age = tr.find_all('td')[-1]['data-sort']
except (AttributeError, TypeError):
continue
if title and download_url and age:
results.append({'release': title, 'getnzb': self.urls['get'] % download_url['href'].lstrip('/'),
'usenetage': int(age.strip())})
except generic.HaltParseException:
time.sleep(1.1)
pass
except Exception:
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_result(mode, len(results) - cnt, search_url)
return results
def find_propers(self, search_date=None):
search_terms = ['.PROPER.', '.REPACK.'] search_terms = ['.PROPER.', '.REPACK.']
results = [] results = []
for term in search_terms: for term in search_terms:
for item in self._doSearch(term, retention=4): for item in self._do_search(term, retention=4):
if 'usenetage' in item: if 'usenetage' in item:
title, url = self._get_title_and_url(item) title, url = self._get_title_and_url(item)
@ -140,47 +225,40 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
return results return results
def _init_api(self):
try:
api_key = self._check_auth()
if not api_key.startswith('cookie:'):
return api_key
except Exception:
return None
self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', api_key)
success, msg = self._check_cookie()
if not success:
logger.log(u'%s: %s' % (msg, self.cookies), logger.WARNING)
self.cookies = None
return None
return False
@staticmethod
def ui_string(key):
result = ''
if 'omgwtfnzbs_api_key' == key:
result = 'Or use... \'cookie: cookname=xx; cookpass=yy\''
return result
class OmgwtfnzbsCache(tvcache.TVCache): class OmgwtfnzbsCache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider) def __init__(self, this_provider):
tvcache.TVCache.__init__(self, this_provider)
self.minTime = 20 self.minTime = 20
def _get_title_and_url(self, item):
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = item.title if item.title else None
if title:
title = u'' + title
title = title.replace(' ', '.')
url = item.link if item.link else None
if url:
url = url.replace('&amp;', '&')
return (title, url)
def _getRSSData(self): def _getRSSData(self):
params = {'user': provider.username,
'api': provider.api_key,
'eng': 1,
'catid': '19,20'} # SD,HD
rss_url = 'https://rss.omgwtfnzbs.org/rss-download.php?' + urllib.urlencode(params) return self.provider.get_cache_data()
logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
data = self.getRSSFeed(rss_url)
if data and 'entries' in data:
return data.entries
else:
return []
provider = OmgwtfnzbsProvider() provider = OmgwtfnzbsProvider()
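
_init_api() above lets the omgwtfnzbs "API key" field double as a cookie definition: values starting with 'cookie:' have the prefix, quotes and whitespace stripped and are then validated as key=value pairs, matching the hint shown by ui_string(). A standalone sketch of that split (the return convention here is illustrative, not the project's):

import re

def split_api_key_setting(value):
    # returns ('api', key) for a normal key, ('cookie', cookies) for the
    # 'cookie: cookname=xx; cookpass=yy' form, or (None, None) when unusable
    if not value:
        return None, None
    if not value.lower().startswith('cookie:'):
        return 'api', value.strip()
    cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', value)
    if re.match(r'^(\w+=\w+[;\s]*)+$', cookies):
        return 'cookie', cookies
    return None, None

# e.g. split_api_key_setting("cookie: cookname=xx; cookpass=yy") -> ('cookie', 'cookname=xx;cookpass=yy')
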

View file

@ -39,14 +39,14 @@ class PiSexyProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = PiSexyCache(self) self.cache = PiSexyCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies and\ logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies and\
'pcode' in self.session.cookies and 'pisexy' in self.session.cookies 'pcode' in self.session.cookies and 'pisexy' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password} login_params = {'username': self.username, 'password': self.password}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -59,24 +59,24 @@ class PiSexyProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
rc = dict((k, re.compile('(?i)' + v)) rc = dict((k, re.compile('(?i)' + v))
for (k, v) in {'info': 'download', 'get': 'download', 'valid_cat': 'cat=(?:0|50[12])', for (k, v) in {'info': 'download', 'get': 'download', 'valid_cat': 'cat=(?:0|50[12])',
'title': 'Download\s([^\s]+).*', 'seeders': '(^\d+)', 'leechers': '(\d+)$'}.items()) 'title': r'Download\s([^\s]+).*', 'seeders': r'(^\d+)', 'leechers': r'(\d+)$'}.items())
for mode in search_params.keys(): for mode in search_params.keys():
for search_string in search_params[mode]: for search_string in search_params[mode]:
if isinstance(search_string, unicode): if isinstance(search_string, unicode):
search_string = unidecode(search_string) search_string = unidecode(search_string)
search_url = self.urls['search'] % search_string search_url = self.urls['search'] % search_string
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -129,7 +129,7 @@ class PiSexyProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)

View file

@ -55,7 +55,7 @@ class RarbgProvider(generic.TorrentProvider):
self.request_throttle = datetime.datetime.now() self.request_throttle = datetime.datetime.now()
self.cache = RarbgCache(self) self.cache = RarbgCache(self)
def _doLogin(self, reset=False): def _do_login(self, reset=False):
if not reset and self.token and self.token_expiry and datetime.datetime.now() < self.token_expiry: if not reset and self.token and self.token_expiry and datetime.datetime.now() < self.token_expiry:
return True return True
@ -69,10 +69,10 @@ class RarbgProvider(generic.TorrentProvider):
logger.log(u'No usable API token returned from: %s' % self.urls['api_token'], logger.ERROR) logger.log(u'No usable API token returned from: %s' % self.urls['api_token'], logger.ERROR)
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(reset=True): if not self._do_login(reset=True):
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -122,7 +122,7 @@ class RarbgProvider(generic.TorrentProvider):
time_out += 1 time_out += 1
time.sleep(1) time.sleep(1)
data = self.getURL(search_url % {'ranked': int(self.confirmed), 'token': self.token}, json=True) data = self.get_url(search_url % {'ranked': int(self.confirmed), 'token': self.token}, json=True)
self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14) self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3) self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)
@ -131,7 +131,7 @@ class RarbgProvider(generic.TorrentProvider):
continue continue
elif 2 == data['error_code']: # Invalid token set elif 2 == data['error_code']: # Invalid token set
if self._doLogin(reset=True): if self._do_login(reset=True):
continue continue
self._log_result(mode_base, len(items[mode_base]) - cnt, search_url) self._log_result(mode_base, len(items[mode_base]) - cnt, search_url)
return results return results
@ -160,7 +160,7 @@ class RarbgProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date, '{{.proper.|.repack.}}') return self._find_propers(search_date, '{{.proper.|.repack.}}')
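The Rarbg hunks keep the provider's token lifecycle while renaming its methods: a cached API token is reused until its expiry passes, requests are throttled a few seconds apart, and error_code 2 forces a fresh _do_login(reset=True). A rough sketch of that lifecycle under assumed names (TokenClient and _request_token are illustrative, not SickGear code):

import datetime


class TokenClient(object):
    def __init__(self):
        self.token = None
        self.token_expiry = None

    def _request_token(self):
        # Stand-in for the real api_token request.
        self.token = 'example-token'
        self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
        return True

    def ensure_token(self, reset=False):
        # Reuse a still-valid token unless a reset is forced (e.g. error_code 2).
        if not reset and self.token and self.token_expiry \
                and datetime.datetime.now() < self.token_expiry:
            return True
        return self._request_token()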
View file
@ -41,9 +41,9 @@ class TorrentRssProvider(generic.TorrentProvider):
self.feeder = RSSFeeds(self) self.feeder = RSSFeeds(self)
self.cache = TorrentRssCache(self) self.cache = TorrentRssCache(self)
def imageName(self): def image_name(self):
return generic.GenericProvider.imageName(self, 'torrentrss') return generic.GenericProvider.image_name(self, 'torrentrss')
def config_str(self): def config_str(self):
return '%s|%s|%s|%d|%s|%d|%d|%d' % (self.name or '', return '%s|%s|%s|%d|%s|%d|%d|%d' % (self.name or '',
@ -81,9 +81,9 @@ class TorrentRssProvider(generic.TorrentProvider):
def validate_feed(self): def validate_feed(self):
succ, err_msg = self.feeder.check_cookie() success, err_msg = self._check_cookie()
if not succ: if not success:
return succ, err_msg return success, err_msg
try: try:
items = self.get_cache_data() items = self.get_cache_data()
@ -96,7 +96,7 @@ class TorrentRssProvider(generic.TorrentProvider):
if re.search('urn:btih:([0-9a-f]{32,40})', url): if re.search('urn:btih:([0-9a-f]{32,40})', url):
break break
else: else:
torrent_file = self.getURL(url) torrent_file = self.get_url(url)
try: try:
bdecode(torrent_file) bdecode(torrent_file)
break break
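In the renamed validate_feed above, an item is accepted straight away when its URL already carries a BitTorrent info-hash, and only otherwise is the file fetched and bdecoded. A simplified version of that first check (the sample URLs are made up):

import re

BTIH_RE = re.compile('urn:btih:([0-9a-f]{32,40})', re.I)


def has_info_hash(url):
    # True when the URL already embeds an info-hash, so no download/bdecode
    # round trip is needed to prove the feed yields usable torrents.
    return bool(BTIH_RE.search(url))

print(has_info_hash('magnet:?xt=urn:btih:' + 40 * 'a'))           # True
print(has_info_hash('http://tracker.example.invalid/1.torrent'))  # False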
View file
@ -44,13 +44,13 @@ class SCCProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = SCCCache(self) self.cache = SCCCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'submit': 'come on in'} login_params = {'username': self.username, 'password': self.password, 'submit': 'come on in'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
@ -61,12 +61,12 @@ class SCCProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
if not self._doLogin(): if not self._do_login():
return results return results
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items()) rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
@ -83,7 +83,7 @@ class SCCProvider(generic.TorrentProvider):
self.urls['nonscene'] % search_string] self.urls['nonscene'] % search_string]
for search_url in searches: for search_url in searches:
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -128,7 +128,7 @@ class SCCProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
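SCC, SpeedCD, TorrentBytes, TorrentDay, TorrentLeech and TorrentShack all share the _do_login shape shown here: a logged_in() lambda inspects the session cookies and the credentials are only posted when those cookies are missing. A condensed sketch of that flow using requests directly (the real code goes through helpers.getURL; the cookie names follow the SCC hunk and vary per site):

import requests


def do_login(session, login_url, username, password):
    logged_in = lambda: 'uid' in session.cookies and 'pass' in session.cookies
    if logged_in():
        return True
    response = session.post(login_url, data={'username': username, 'password': password})
    return bool(response) and logged_in()

# session = requests.Session()
# do_login(session, 'https://tracker.example.invalid/login.php', 'user', 'secret')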
View file
@ -44,13 +44,13 @@ class SpeedCDProvider(generic.TorrentProvider):
self.freeleech = False self.freeleech = False
self.cache = SpeedCDCache(self) self.cache = SpeedCDCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'inSpeed_speedian' in self.session.cookies logged_in = lambda: 'inSpeed_speedian' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password} login_params = {'username': self.username, 'password': self.password}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -63,10 +63,10 @@ class SpeedCDProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -78,7 +78,7 @@ class SpeedCDProvider(generic.TorrentProvider):
post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string}, post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string},
**self.categories[mode]) **self.categories[mode])
data_json = self.getURL(self.urls['search'], post_data=post_data, json=True) data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
if not data_json: if not data_json:
@ -111,7 +111,7 @@ class SpeedCDProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
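The SpeedCD search builds its POST body by merging a base payload with per-mode category fields via dict(base, **extra). A tiny demonstration of that merge with made-up category values:

base = {'jxt': 4, 'jxw': 'b', 'search': 'some show s01e01'}
categories = {'Episode': {'c2': 1, 'c49': 1}, 'Season': {'c41': 1}}

post_data = dict(base, **categories['Episode'])
print(post_data)  # the base keys plus c2/c49 for this mode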
View file
@ -15,6 +15,7 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import datetime import datetime
from . import generic from . import generic
@ -33,17 +34,17 @@ class StrikeProvider(generic.TorrentProvider):
self.minseed, self.minleech = 2 * [None] self.minseed, self.minleech = 2 * [None]
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._checkAuth(): if not self._check_auth():
return results return results
for mode in search_params.keys(): for mode in search_params.keys():
for search_string in search_params[mode]: for search_string in search_params[mode]:
search_url = self.urls['search'] % search_string.replace(' ', '+') search_url = self.urls['search'] % re.sub('[\.\s]+', ' ', search_string)
data_json = self.getURL(search_url, json=True) data_json = self.get_url(search_url, json=True)
cnt = len(results) cnt = len(results)
try: try:
@ -66,7 +67,7 @@ class StrikeProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
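The Strike change swaps a plain space-to-plus replace for a regex that collapses dots and whitespace runs before the term is dropped into the search URL. Roughly:

import re


def normalise_query(search_string):
    # Collapse dots and runs of whitespace into single spaces.
    return re.sub(r'[.\s]+', ' ', search_string)

print(normalise_query('Some.Show.S01E01  720p'))  # Some Show S01E01 720p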
View file
@ -36,8 +36,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
def __init__(self): def __init__(self):
generic.TorrentProvider.__init__(self, 'The Pirate Bay') generic.TorrentProvider.__init__(self, 'The Pirate Bay')
self.urls = {'config_provider_home_uri': ['https://thepiratebay.gd/', 'https://thepiratebay.mn/', self.urls = {'config_provider_home_uri': ['https://thepiratebay.se/', 'https://thepiratebay.gd/',
'https://thepiratebay.am/', 'https://thepiratebay.vg/', 'https://thepiratebay.mn/', 'https://thepiratebay.vg/',
'https://thepiratebay.la/'], 'https://thepiratebay.la/'],
'search': 'search/%s/0/7/200', 'search': 'search/%s/0/7/200',
'cache': 'tv/latest/'} # order by seed 'cache': 'tv/latest/'} # order by seed
@ -57,7 +57,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
has_signature = False has_signature = False
details_url = '/ajax_details_filelist.php?id=%s' % torrent_id details_url = '/ajax_details_filelist.php?id=%s' % torrent_id
for idx, url in enumerate(self.urls['config_provider_home_uri']): for idx, url in enumerate(self.urls['config_provider_home_uri']):
data = self.getURL(url + details_url) data = self.get_url(url + details_url)
if data and re.search(r'<title>The\sPirate\sBay', data[33:200:]): if data and re.search(r'<title>The\sPirate\sBay', data[33:200:]):
has_signature = True has_signature = True
break break
@ -144,13 +144,13 @@ class ThePirateBayProvider(generic.TorrentProvider):
return [{'Episode': self._build_search_strings(ep_detail, append=(add_string, '')[self.show.anime])}] return [{'Episode': self._build_search_strings(ep_detail, append=(add_string, '')[self.show.anime])}]
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
rc = dict((k, re.compile('(?i)' + v)) rc = dict((k, re.compile('(?i)' + v))
for (k, v) in {'info': 'detail', 'get': 'download[^"]+magnet', 'tid': '.*/(\d{5,}).*', for (k, v) in {'info': 'detail', 'get': 'download[^"]+magnet', 'tid': r'.*/(\d{5,}).*',
'verify': '(?:helper|moderator|trusted|vip)'}.items()) 'verify': '(?:helper|moderator|trusted|vip)'}.items())
has_signature = False has_signature = False
for mode in search_params.keys(): for mode in search_params.keys():
@ -165,7 +165,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
log_url = u'(%s/%s): %s' % (idx + 1, len(self.urls['config_provider_home_uri']), search_url) log_url = u'(%s/%s): %s' % (idx + 1, len(self.urls['config_provider_home_uri']), search_url)
html = self.getURL(search_url) html = self.get_url(search_url)
if html and re.search(r'Pirate\sBay', html[33:7632:]): if html and re.search(r'Pirate\sBay', html[33:7632:]):
has_signature = True has_signature = True
break break
@ -227,7 +227,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date, '') return self._find_propers(search_date, '')
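The Pirate Bay provider keeps its mirror failover: each configured home URI is tried in turn and only accepted when the returned page carries the expected site signature. A stripped-down sketch of that loop (fetch stands in for get_url, and the mirror list is invented):

import re

MIRRORS = ['https://mirror-one.example.invalid/', 'https://mirror-two.example.invalid/']


def first_live_mirror(fetch):
    for idx, url in enumerate(MIRRORS):
        html = fetch(url + 'tv/latest/')
        if html and re.search(r'Pirate\sBay', html[33:7632]):
            return idx, url   # signature found, keep using this mirror
    return None, None         # no mirror answered with a recognisable page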
View file
@ -16,140 +16,86 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import urllib
import traceback import traceback
import urllib
import generic from . import generic
from sickbeard import logger, tvcache, show_name_helpers from sickbeard import logger, tvcache, show_name_helpers
from sickbeard.common import Quality
from sickbeard.bs4_parser import BS4Parser from sickbeard.bs4_parser import BS4Parser
class TokyoToshokanProvider(generic.TorrentProvider): class TokyoToshokanProvider(generic.TorrentProvider):
def __init__(self): def __init__(self):
generic.TorrentProvider.__init__(self, 'TokyoToshokan', True, True) generic.TorrentProvider.__init__(self, 'TokyoToshokan', anime_only=True)
self.ratio = None
self.cache = TokyoToshokanCache(self)
self.url = 'http://tokyotosho.info/' self.url = 'http://tokyotosho.info/'
def _get_title_and_url(self, item): self.cache = TokyoToshokanCache(self)
title, url = item def _do_search(self, search_string, search_mode='eponly', epcount=0, age=0):
if title:
title = u'' + title
title = title.replace(' ', '.')
if url:
url = url.replace('&amp;', '&')
return (title, url)
def seedRatio(self):
return self.ratio
def getQuality(self, item, anime=False):
quality = Quality.sceneQuality(item[0], anime)
return quality
def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)
def _get_season_search_strings(self, ep_obj):
return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
def _get_episode_search_strings(self, ep_obj, add_string=''):
return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0):
if self.show and not self.show.is_anime:
logger.log(u"" + str(self.show.name) + " is not an anime skipping " + str(self.name))
return []
params = {
"terms": search_string.encode('utf-8'),
"type": 1, # get anime types
}
searchURL = self.url + 'search.php?' + urllib.urlencode(params)
data = self.getURL(searchURL)
logger.log(u"Search string: " + searchURL, logger.DEBUG)
if not data:
return []
results = [] results = []
try: if self.show and not self.show.is_anime:
with BS4Parser(data, features=["html5lib", "permissive"]) as soup: return results
torrent_table = soup.find('table', attrs={'class': 'listing'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
if torrent_rows:
if torrent_rows[0].find('td', attrs={'class': 'centertext'}):
a = 1
else:
a = 0
for top, bottom in zip(torrent_rows[a::2], torrent_rows[a::2]): params = {'terms': search_string.encode('utf-8'),
title = top.find('td', attrs={'class': 'desc-top'}).text 'type': 1} # get anime types
url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
if not title or not url: search_url = self.url + 'search.php?' + urllib.urlencode(params)
continue logger.log(u'Search string: ' + search_url, logger.DEBUG)
item = title.lstrip(), url html = self.get_url(search_url)
results.append(item) if html:
try:
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
torrent_table = soup.find('table', attrs={'class': 'listing'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
if torrent_rows:
a = (0, 1)[torrent_rows[0].find('td', attrs={'class': 'centertext'})]
except Exception as e: for top, bottom in zip(torrent_rows[a::2], torrent_rows[a::2]):
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR) title = top.find('td', attrs={'class': 'desc-top'}).text
url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
if title and url:
results.append((title.lstrip(), url))
except Exception:
logger.log(u'Failed to parsing ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
return results return results
def find_search_results(self, show, episodes, search_mode, manual_search=False):
return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)
def _get_season_search_strings(self, ep_obj, **kwargs):
return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
def _get_episode_search_strings(self, ep_obj, **kwargs):
return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
class TokyoToshokanCache(tvcache.TVCache): class TokyoToshokanCache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# only poll NyaaTorrents every 15 minutes max def __init__(self, this_provider):
self.minTime = 15 tvcache.TVCache.__init__(self, this_provider)
def _get_title_and_url(self, item): self.minTime = 15 # cache update frequency
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = item.title if item.title else None
if title:
title = u'' + title
title = title.replace(' ', '.')
url = item.link if item.link else None
if url:
url = url.replace('&amp;', '&')
return (title, url)
def _getRSSData(self): def _getRSSData(self):
params = { params = {'filter': '1'}
"filter": '1',
}
url = self.provider.url + 'rss.php?' + urllib.urlencode(params) url = self.provider.url + 'rss.php?' + urllib.urlencode(params)
logger.log(u'TokyoToshokan cache update URL: ' + url, logger.DEBUG)
logger.log(u"TokyoToshokan cache update URL: " + url, logger.DEBUG)
data = self.getRSSFeed(url) data = self.getRSSFeed(url)
if data and 'entries' in data: if data and 'entries' in data:
return data.entries return data.entries
else: return []
return []
provider = TokyoToshokanProvider() provider = TokyoToshokanProvider()
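The rewritten TokyoToshokan parser folds an if/else into the (false_value, true_value)[condition] idiom when deciding whether the first table row is a header to skip. With plain booleans the trick looks like this (the rows are invented):

rows = ['header', 'item-1', 'item-2']
has_header = rows[0] == 'header'
start = (0, 1)[has_header]   # 1 skips the header row, 0 keeps everything
print(rows[start:])          # ['item-1', 'item-2']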
View file
@ -43,13 +43,13 @@ class TorrentBytesProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = TorrentBytesCache(self) self.cache = TorrentBytesCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'login': 'Log in!'} login_params = {'username': self.username, 'password': self.password, 'login': 'Log in!'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -62,10 +62,10 @@ class TorrentBytesProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -77,7 +77,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
search_string = unidecode(search_string) search_string = unidecode(search_string)
search_url = self.urls['search'] % (search_string, self.categories) search_url = self.urls['search'] % (search_string, self.categories)
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -121,7 +121,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
View file
@ -44,13 +44,13 @@ class TorrentDayProvider(generic.TorrentProvider):
self.freeleech = False self.freeleech = False
self.cache = TorrentDayCache(self) self.cache = TorrentDayCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'submit.x': 0, 'submit.y': 0} login_params = {'username': self.username, 'password': self.password, 'submit.x': 0, 'submit.y': 0}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -63,10 +63,10 @@ class TorrentDayProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -80,7 +80,7 @@ class TorrentDayProvider(generic.TorrentProvider):
if self.freeleech: if self.freeleech:
post_data.update({'free': 'on'}) post_data.update({'free': 'on'})
data_json = self.getURL(self.urls['search'], post_data=post_data, json=True) data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
if not data_json: if not data_json:
@ -111,7 +111,7 @@ class TorrentDayProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date, '') return self._find_propers(search_date, '')
View file
@ -44,13 +44,13 @@ class TorrentLeechProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = TorrentLeechCache(self) self.cache = TorrentLeechCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'tluid' in self.session.cookies and 'tlpass' in self.session.cookies logged_in = lambda: 'tluid' in self.session.cookies and 'tlpass' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'remember_me': 'on', 'login': 'submit'} login_params = {'username': self.username, 'password': self.password, 'remember_me': 'on', 'login': 'submit'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -60,10 +60,10 @@ class TorrentLeechProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -80,7 +80,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
else: else:
search_url = self.urls['search'] % (search_string, self.categories) search_url = self.urls['search'] % (search_string, self.categories)
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -123,7 +123,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
View file
@ -46,13 +46,13 @@ class TorrentShackProvider(generic.TorrentProvider):
self.username, self.password, self.minseed, self.minleech = 4 * [None] self.username, self.password, self.minseed, self.minleech = 4 * [None]
self.cache = TorrentShackCache(self) self.cache = TorrentShackCache(self)
def _doLogin(self): def _do_login(self):
logged_in = lambda: 'session' in self.session.cookies logged_in = lambda: 'session' in self.session.cookies
if logged_in(): if logged_in():
return True return True
if self._checkAuth(): if self._check_auth():
login_params = {'username': self.username, 'password': self.password, 'keeplogged': '1', 'login': 'Login'} login_params = {'username': self.username, 'password': self.password, 'keeplogged': '1', 'login': 'Login'}
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session) response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
if response and logged_in(): if response and logged_in():
@ -65,10 +65,10 @@ class TorrentShackProvider(generic.TorrentProvider):
return False return False
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
results = [] results = []
if not self._doLogin(): if not self._do_login():
return results return results
items = {'Season': [], 'Episode': [], 'Cache': []} items = {'Season': [], 'Episode': [], 'Cache': []}
@ -83,7 +83,7 @@ class TorrentShackProvider(generic.TorrentProvider):
# fetch 15 results by default, and up to 100 if allowed in user profile # fetch 15 results by default, and up to 100 if allowed in user profile
search_url = self.urls['search'] % search_string search_url = self.urls['search'] % search_string
html = self.getURL(search_url) html = self.get_url(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -128,7 +128,7 @@ class TorrentShackProvider(generic.TorrentProvider):
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
View file
@ -48,9 +48,9 @@ class ToTVProvider(generic.TorrentProvider):
logger.DEBUG) logger.DEBUG)
raise AuthException('Your authentication credentials for %s are incorrect, check your config.' % self.name) raise AuthException('Your authentication credentials for %s are incorrect, check your config.' % self.name)
def _doSearch(self, search_params, mode='eponly', epcount=0, age=0): def _do_search(self, search_params, mode='eponly', epcount=0, age=0):
self._checkAuth() self._check_auth()
self.headers.update({'X-Authorization': self.api_key}) self.headers.update({'X-Authorization': self.api_key})
results = [] results = []
params = {'limit': 100} params = {'limit': 100}
@ -62,7 +62,7 @@ class ToTVProvider(generic.TorrentProvider):
search_url = self.urls['search'] % urllib.urlencode(params) search_url = self.urls['search'] % urllib.urlencode(params)
data_json = self.getURL(search_url, json=True) data_json = self.get_url(search_url, json=True)
cnt = len(results) cnt = len(results)
if data_json and 'results' in data_json and self._check_auth_from_data(data_json): if data_json and 'results' in data_json and self._check_auth_from_data(data_json):
@ -82,7 +82,7 @@ class ToTVProvider(generic.TorrentProvider):
self._log_result(mode, len(results) - cnt, search_url) self._log_result(mode, len(results) - cnt, search_url)
return results return results
def findPropers(self, search_date=datetime.datetime.today()): def find_propers(self, search_date=datetime.datetime.today()):
return self._find_propers(search_date) return self._find_propers(search_date)
@ -113,7 +113,7 @@ class ToTVProvider(generic.TorrentProvider):
def get_cache_data(self, *args, **kwargs): def get_cache_data(self, *args, **kwargs):
return self._doSearch({}) return self._do_search({})
class ToTVCache(tvcache.TVCache): class ToTVCache(tvcache.TVCache):
View file
@ -66,7 +66,7 @@ class WombleCache(tvcache.TVCache):
cl.append(ci) cl.append(ci)
if 0 < len(cl): if 0 < len(cl):
my_db = self._getDB() my_db = self.get_db()
my_db.mass_action(cl) my_db.mass_action(cl)
# set last updated # set last updated

View file
# This file is part of SickGear. # This file is part of SickGear.
# #
import re
import requests
import requests.cookies
from feedparser import feedparser from feedparser import feedparser
from sickbeard import helpers, logger from sickbeard import helpers, logger
@ -20,28 +17,10 @@ class RSSFeeds:
def _check_auth_cookie(self): def _check_auth_cookie(self):
if self.provider and hasattr(self.provider, 'cookies'): if self.provider:
cookies = self.provider.cookies return self.provider.check_auth_cookie()
if not re.match('^(\w+=\w+[;\s]*)+$', cookies):
return False
cj = requests.utils.add_dict_to_cookiejar(self.provider.session.cookies,
dict([x.strip().split('=') for x in cookies.split(';')
if x != ''])),
for item in cj:
if not isinstance(item, requests.cookies.RequestsCookieJar):
return False
return True return True
def check_cookie(self):
if self._check_auth_cookie():
return True, None
return False, 'Cookies not correctly formatted key=value pairs e.g. uid=xx;pass=yy): ' + self.provider.cookies
def get_feed(self, url, request_headers=None): def get_feed(self, url, request_headers=None):
if not self._check_auth_cookie(): if not self._check_auth_cookie():
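The cookie-format check that used to live in RSSFeeds (semicolon-separated key=value pairs) now sits behind the provider's check_auth_cookie(). The validation it performs can be sketched like this (the helper name and sample values are illustrative):

import re


def parse_cookie_string(cookies):
    # Accept only 'key=value; key2=value2' style strings, then split them
    # into a dict suitable for a requests cookie jar.
    if not re.match(r'^(\w+=\w+[;\s]*)+$', cookies):
        return None
    return dict(x.strip().split('=') for x in cookies.split(';') if x.strip())

print(parse_cookie_string('uid=1234; pass=abcd'))  # {'uid': '1234', 'pass': 'abcd'}
print(parse_cookie_string('not a cookie'))         # None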
View file
@ -59,17 +59,8 @@ def sendNZB(nzb):
# if it's a normal result we just pass SAB the URL # if it's a normal result we just pass SAB the URL
if nzb.resultType == "nzb": if nzb.resultType == "nzb":
# for newzbin results send the ID to sab specifically params['mode'] = 'addurl'
if nzb.provider.getID() == 'newzbin': params['name'] = nzb.url
id = nzb.provider.getIDFromURL(nzb.url)
if not id:
logger.log("Unable to send NZB to SABnzbd, can't find ID in URL " + str(nzb.url), logger.ERROR)
return False
params['mode'] = 'addid'
params['name'] = id
else:
params['mode'] = 'addurl'
params['name'] = nzb.url
# if we get a raw data result we want to upload it to SAB # if we get a raw data result we want to upload it to SAB
elif nzb.resultType == "nzbdata": elif nzb.resultType == "nzbdata":
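With the defunct newzbin branch removed, an NZB result that carries a URL is always handed to SABnzbd as an addurl call. The parameters end up looking something like the following (the helper, host and apikey are placeholders; the real request also includes the configured category and credentials):

def sab_params(nzb_url, apikey, category):
    # Build the SABnzbd api parameters for a URL-based NZB result.
    return {'apikey': apikey, 'mode': 'addurl', 'name': nzb_url, 'cat': category}

print(sab_params('http://indexer.example.invalid/get/123.nzb', 'PLACEHOLDER', 'tv'))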
View file
@ -60,7 +60,7 @@ def _downloadResult(result):
# nzbs with an URL can just be downloaded from the provider # nzbs with an URL can just be downloaded from the provider
if result.resultType == "nzb": if result.resultType == "nzb":
newResult = resProvider.downloadResult(result) newResult = resProvider.download_result(result)
# if it's an nzb data result # if it's an nzb data result
elif result.resultType == "nzbdata": elif result.resultType == "nzbdata":
@ -82,7 +82,7 @@ def _downloadResult(result):
logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR) logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
newResult = False newResult = False
elif resProvider.providerType == "torrent": elif resProvider.providerType == "torrent":
newResult = resProvider.downloadResult(result) newResult = resProvider.download_result(result)
else: else:
logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR) logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
newResult = False newResult = False
@ -134,7 +134,7 @@ def snatchEpisode(result, endStatus=SNATCHED):
else: else:
# make sure we have the torrent file content # make sure we have the torrent file content
if not result.content and not result.url.startswith('magnet'): if not result.content and not result.url.startswith('magnet'):
result.content = result.provider.getURL(result.url) result.content = result.provider.get_url(result.url)
if not result.content: if not result.content:
logger.log(u'Torrent content failed to download from ' + result.url, logger.ERROR) logger.log(u'Torrent content failed to download from ' + result.url, logger.ERROR)
return False return False
@ -401,12 +401,12 @@ def searchForNeededEpisodes(episodes):
origThreadName = threading.currentThread().name origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_recentsearch] providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
for curProvider in providers: for curProvider in providers:
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]" threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
curFoundResults = curProvider.searchRSS(episodes) curFoundResults = curProvider.search_rss(episodes)
didSearch = True didSearch = True
@ -435,7 +435,7 @@ def searchForNeededEpisodes(episodes):
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole": if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
bestResult.content = None bestResult.content = None
if not bestResult.url.startswith('magnet'): if not bestResult.url.startswith('magnet'):
bestResult.content = bestResult.provider.getURL(bestResult.url) bestResult.content = bestResult.provider.get_url(bestResult.url)
if not bestResult.content: if not bestResult.content:
continue continue
@ -451,7 +451,7 @@ def searchForNeededEpisodes(episodes):
return foundResults.values() return foundResults.values()
def searchProviders(show, episodes, manualSearch=False): def searchProviders(show, episodes, manual_search=False):
foundResults = {} foundResults = {}
finalResults = [] finalResults = []
@ -459,7 +459,7 @@ def searchProviders(show, episodes, manualSearch=False):
origThreadName = threading.currentThread().name origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog] providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog]
for providerNum, curProvider in enumerate(providers): for providerNum, curProvider in enumerate(providers):
if curProvider.anime_only and not show.is_anime: if curProvider.anime_only and not show.is_anime:
logger.log(u"" + str(show.name) + " is not an anime, skipping", logger.DEBUG) logger.log(u"" + str(show.name) + " is not an anime, skipping", logger.DEBUG)
@ -482,7 +482,7 @@ def searchProviders(show, episodes, manualSearch=False):
try: try:
curProvider.cache._clearCache() curProvider.cache._clearCache()
searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch) searchResults = curProvider.find_search_results(show, episodes, search_mode, manual_search)
except exceptions.AuthException as e: except exceptions.AuthException as e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR) logger.log(u"Authentication error: " + ex(e), logger.ERROR)
break break
@ -703,7 +703,7 @@ def searchProviders(show, episodes, manualSearch=False):
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole": if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
bestResult.content = None bestResult.content = None
if not bestResult.url.startswith('magnet'): if not bestResult.url.startswith('magnet'):
bestResult.content = bestResult.provider.getURL(bestResult.url) bestResult.content = bestResult.provider.get_url(bestResult.url)
if not bestResult.content: if not bestResult.content:
continue continue
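Both search paths now filter providers through is_active() plus the relevant enable flag before doing any work. The shape of that filter, shown with throwaway stand-in objects:

class StubProvider(object):
    def __init__(self, name, active, recent, backlog):
        self.name = name
        self._active = active
        self.enable_recentsearch = recent
        self.enable_backlog = backlog

    def is_active(self):
        return self._active


provs = [StubProvider('a', True, True, False),
         StubProvider('b', True, False, True),
         StubProvider('c', False, True, True)]

print([x.name for x in provs if x.is_active() and x.enable_recentsearch])  # ['a']
print([x.name for x in provs if x.is_active() and x.enable_backlog])       # ['b']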
View file
@ -291,7 +291,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
logger.log('Updating provider caches with recent upload data') logger.log('Updating provider caches with recent upload data')
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_recentsearch] providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
for curProvider in providers: for curProvider in providers:
# spawn separate threads for each provider so we don't need to wait for providers with slow network operation # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName + threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
View file
@ -48,16 +48,16 @@ class TVCache:
def __init__(self, provider): def __init__(self, provider):
self.provider = provider self.provider = provider
self.providerID = self.provider.getID() self.providerID = self.provider.get_id()
self.providerDB = None self.providerDB = None
self.minTime = 10 self.minTime = 10
def _getDB(self): def get_db(self):
return CacheDBConnection(self.providerID) return CacheDBConnection(self.providerID)
def _clearCache(self): def _clearCache(self):
if self.shouldClearCache(): if self.shouldClearCache():
myDB = self._getDB() myDB = self.get_db()
myDB.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID]) myDB.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID])
def _get_title_and_url(self, item): def _get_title_and_url(self, item):
@ -69,7 +69,7 @@ class TVCache:
return data return data
def _checkAuth(self): def _checkAuth(self):
return self.provider._checkAuth() return self.provider._check_auth()
def _checkItemAuth(self, title, url): def _checkItemAuth(self, title, url):
return True return True
@ -102,7 +102,7 @@ class TVCache:
cl.append(ci) cl.append(ci)
if len(cl) > 0: if len(cl) > 0:
myDB = self._getDB() myDB = self.get_db()
myDB.mass_action(cl) myDB.mass_action(cl)
return [] return []
@ -125,7 +125,7 @@ class TVCache:
url = self._translateLinkURL(url) url = self._translateLinkURL(url)
logger.log(u'Attempting to add item to cache: ' + title, logger.DEBUG) logger.log(u'Attempting to add item to cache: ' + title, logger.DEBUG)
return self._addCacheEntry(title, url) return self.add_cache_entry(title, url)
else: else:
logger.log( logger.log(
@ -134,7 +134,7 @@ class TVCache:
return None return None
def _getLastUpdate(self): def _getLastUpdate(self):
myDB = self._getDB() myDB = self.get_db()
sqlResults = myDB.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) sqlResults = myDB.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID])
if sqlResults: if sqlResults:
@ -147,7 +147,7 @@ class TVCache:
return datetime.datetime.fromtimestamp(lastTime) return datetime.datetime.fromtimestamp(lastTime)
def _getLastSearch(self): def _getLastSearch(self):
myDB = self._getDB() myDB = self.get_db()
sqlResults = myDB.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) sqlResults = myDB.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID])
if sqlResults: if sqlResults:
@ -163,7 +163,7 @@ class TVCache:
if not toDate: if not toDate:
toDate = datetime.datetime.today() toDate = datetime.datetime.today()
myDB = self._getDB() myDB = self.get_db()
myDB.upsert('lastUpdate', myDB.upsert('lastUpdate',
{'time': int(time.mktime(toDate.timetuple()))}, {'time': int(time.mktime(toDate.timetuple()))},
{'provider': self.providerID}) {'provider': self.providerID})
@ -172,7 +172,7 @@ class TVCache:
if not toDate: if not toDate:
toDate = datetime.datetime.today() toDate = datetime.datetime.today()
myDB = self._getDB() myDB = self.get_db()
myDB.upsert('lastSearch', myDB.upsert('lastSearch',
{'time': int(time.mktime(toDate.timetuple()))}, {'time': int(time.mktime(toDate.timetuple()))},
{'provider': self.providerID}) {'provider': self.providerID})
@ -196,7 +196,7 @@ class TVCache:
return True return True
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0): def add_cache_entry(self, name, url, parse_result=None, indexer_id=0):
# check if we passed in a parsed result or should we try and create one # check if we passed in a parsed result or should we try and create one
if not parse_result: if not parse_result:
@ -256,7 +256,7 @@ class TVCache:
return [] return []
def listPropers(self, date=None, delimiter='.'): def listPropers(self, date=None, delimiter='.'):
myDB = self._getDB() myDB = self.get_db()
sql = "SELECT * FROM provider_cache WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%' AND provider = ?" sql = "SELECT * FROM provider_cache WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%' AND provider = ?"
if date != None: if date != None:
@ -268,7 +268,7 @@ class TVCache:
neededEps = {} neededEps = {}
cl = [] cl = []
myDB = self._getDB() myDB = self.get_db()
if type(episode) != list: if type(episode) != list:
sqlResults = myDB.select( sqlResults = myDB.select(
'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?', 'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?',
@ -331,9 +331,10 @@ class TVCache:
logger.log(u'Found result ' + title + ' at ' + url) logger.log(u'Found result ' + title + ' at ' + url)
result = self.provider.getResult([epObj]) result = self.provider.get_result([epObj], url)
if None is result:
continue
result.show = showObj result.show = showObj
result.url = url
result.name = title result.name = title
result.quality = curQuality result.quality = curQuality
result.release_group = curReleaseGroup result.release_group = curReleaseGroup
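The cache search above now passes the URL into get_result() and tolerates a None return instead of always receiving an object. The guard reduces to something like this (all names are stand-ins):

def build_results(provider, candidates):
    results = []
    for ep_obj, url, title in candidates:
        result = provider.get_result([ep_obj], url)
        if result is None:
            continue          # the provider could not build a result for this URL
        result.name = title
        results.append(result)
    return results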
View file
@ -3914,14 +3914,14 @@ class ConfigProviders(Config):
if not name: if not name:
return json.dumps({'error': 'No Provider Name specified'}) return json.dumps({'error': 'No Provider Name specified'})
providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) providerDict = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
tempProvider = newznab.NewznabProvider(name, '') tempProvider = newznab.NewznabProvider(name, '')
if tempProvider.getID() in providerDict: if tempProvider.get_id() in providerDict:
return json.dumps({'error': 'Provider Name already exists as ' + providerDict[tempProvider.getID()].name}) return json.dumps({'error': 'Provider Name already exists as ' + providerDict[tempProvider.get_id()].name})
else: else:
return json.dumps({'success': tempProvider.getID()}) return json.dumps({'success': tempProvider.get_id()})
def saveNewznabProvider(self, name, url, key=''): def saveNewznabProvider(self, name, url, key=''):
@ -3942,12 +3942,12 @@ class ConfigProviders(Config):
else: else:
providerDict[name].needs_auth = True providerDict[name].needs_auth = True
return providerDict[name].getID() + '|' + providerDict[name].config_str() return providerDict[name].get_id() + '|' + providerDict[name].config_str()
else: else:
newProvider = newznab.NewznabProvider(name, url, key=key) newProvider = newznab.NewznabProvider(name, url, key=key)
sickbeard.newznabProviderList.append(newProvider) sickbeard.newznabProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.config_str() return newProvider.get_id() + '|' + newProvider.config_str()
def getNewznabCategories(self, name, url, key): def getNewznabCategories(self, name, url, key):
''' '''
@ -3969,7 +3969,7 @@ class ConfigProviders(Config):
return json.dumps({'success' : False, 'error': error}) return json.dumps({'success' : False, 'error': error})
#Get list with Newznabproviders #Get list with Newznabproviders
#providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) #providerDict = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
#Get newznabprovider obj with provided name #Get newznabprovider obj with provided name
tempProvider= newznab.NewznabProvider(name, url, key) tempProvider= newznab.NewznabProvider(name, url, key)
@ -3980,7 +3980,7 @@ class ConfigProviders(Config):
def deleteNewznabProvider(self, nnid): def deleteNewznabProvider(self, nnid):
providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) providerDict = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
if nnid not in providerDict or providerDict[nnid].default: if nnid not in providerDict or providerDict[nnid].default:
return '0' return '0'
@ -3999,16 +3999,16 @@ class ConfigProviders(Config):
return json.dumps({'error': 'Invalid name specified'}) return json.dumps({'error': 'Invalid name specified'})
providerDict = dict( providerDict = dict(
zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) zip([x.get_id() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList))
tempProvider = rsstorrent.TorrentRssProvider(name, url, cookies) tempProvider = rsstorrent.TorrentRssProvider(name, url, cookies)
if tempProvider.getID() in providerDict: if tempProvider.get_id() in providerDict:
return json.dumps({'error': 'Exists as ' + providerDict[tempProvider.getID()].name}) return json.dumps({'error': 'Exists as ' + providerDict[tempProvider.get_id()].name})
else: else:
(succ, errMsg) = tempProvider.validate_feed() (succ, errMsg) = tempProvider.validate_feed()
if succ: if succ:
return json.dumps({'success': tempProvider.getID()}) return json.dumps({'success': tempProvider.get_id()})
else: else:
return json.dumps({'error': errMsg}) return json.dumps({'error': errMsg})
@ -4024,17 +4024,17 @@ class ConfigProviders(Config):
providerDict[name].url = config.clean_url(url) providerDict[name].url = config.clean_url(url)
providerDict[name].cookies = cookies providerDict[name].cookies = cookies
return providerDict[name].getID() + '|' + providerDict[name].config_str() return providerDict[name].get_id() + '|' + providerDict[name].config_str()
else: else:
newProvider = rsstorrent.TorrentRssProvider(name, url, cookies) newProvider = rsstorrent.TorrentRssProvider(name, url, cookies)
sickbeard.torrentRssProviderList.append(newProvider) sickbeard.torrentRssProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.config_str() return newProvider.get_id() + '|' + newProvider.config_str()
def deleteTorrentRssProvider(self, id): def deleteTorrentRssProvider(self, id):
providerDict = dict( providerDict = dict(
zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) zip([x.get_id() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList))
if id not in providerDict: if id not in providerDict:
return '0' return '0'
@ -4055,7 +4055,7 @@ class ConfigProviders(Config):
provider_list = [] provider_list = []
newznabProviderDict = dict( newznabProviderDict = dict(
zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList)) zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
finishedNames = [] finishedNames = []
@ -4074,7 +4074,7 @@ class ConfigProviders(Config):
newProvider = newznab.NewznabProvider(cur_name, cur_url, key=cur_key) newProvider = newznab.NewznabProvider(cur_name, cur_url, key=cur_key)
cur_id = newProvider.getID() cur_id = newProvider.get_id()
# if it already exists then update it # if it already exists then update it
if cur_id in newznabProviderDict: if cur_id in newznabProviderDict:
@ -4118,11 +4118,11 @@ class ConfigProviders(Config):
# delete anything that is missing # delete anything that is missing
for curProvider in sickbeard.newznabProviderList: for curProvider in sickbeard.newznabProviderList:
if curProvider.getID() not in finishedNames: if curProvider.get_id() not in finishedNames:
sickbeard.newznabProviderList.remove(curProvider) sickbeard.newznabProviderList.remove(curProvider)
torrentRssProviderDict = dict( torrentRssProviderDict = dict(
zip([x.getID() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList)) zip([x.get_id() for x in sickbeard.torrentRssProviderList], sickbeard.torrentRssProviderList))
finishedNames = [] finishedNames = []
if torrentrss_string: if torrentrss_string:
@ -4139,7 +4139,7 @@ class ConfigProviders(Config):
newProvider = rsstorrent.TorrentRssProvider(curName, curURL, curCookies) newProvider = rsstorrent.TorrentRssProvider(curName, curURL, curCookies)
curID = newProvider.getID() curID = newProvider.get_id()
# if it already exists then update it # if it already exists then update it
if curID in torrentRssProviderDict: if curID in torrentRssProviderDict:
@ -4154,7 +4154,7 @@ class ConfigProviders(Config):
# delete anything that is missing # delete anything that is missing
for curProvider in sickbeard.torrentRssProviderList: for curProvider in sickbeard.torrentRssProviderList:
if curProvider.getID() not in finishedNames: if curProvider.get_id() not in finishedNames:
sickbeard.torrentRssProviderList.remove(curProvider) sickbeard.torrentRssProviderList.remove(curProvider)
# do the enable/disable # do the enable/disable
@ -4163,7 +4163,7 @@ class ConfigProviders(Config):
curEnabled = config.to_int(curEnabled) curEnabled = config.to_int(curEnabled)
curProvObj = [x for x in sickbeard.providers.sortedProviderList() if curProvObj = [x for x in sickbeard.providers.sortedProviderList() if
x.getID() == curProvider and hasattr(x, 'enabled')] x.get_id() == curProvider and hasattr(x, 'enabled')]
if curProvObj: if curProvObj:
curProvObj[0].enabled = bool(curEnabled) curProvObj[0].enabled = bool(curEnabled)
@ -4179,31 +4179,31 @@ class ConfigProviders(Config):
if hasattr(curTorrentProvider, 'minseed'): if hasattr(curTorrentProvider, 'minseed'):
try: try:
curTorrentProvider.minseed = int(str(kwargs[curTorrentProvider.getID() + '_minseed']).strip()) curTorrentProvider.minseed = int(str(kwargs[curTorrentProvider.get_id() + '_minseed']).strip())
except: except:
curTorrentProvider.minseed = 0 curTorrentProvider.minseed = 0
if hasattr(curTorrentProvider, 'minleech'): if hasattr(curTorrentProvider, 'minleech'):
try: try:
curTorrentProvider.minleech = int(str(kwargs[curTorrentProvider.getID() + '_minleech']).strip()) curTorrentProvider.minleech = int(str(kwargs[curTorrentProvider.get_id() + '_minleech']).strip())
except: except:
curTorrentProvider.minleech = 0 curTorrentProvider.minleech = 0
if hasattr(curTorrentProvider, 'ratio'): if hasattr(curTorrentProvider, 'ratio'):
try: try:
curTorrentProvider.ratio = str(kwargs[curTorrentProvider.getID() + '_ratio']).strip() curTorrentProvider.ratio = str(kwargs[curTorrentProvider.get_id() + '_ratio']).strip()
except: except:
curTorrentProvider.ratio = None curTorrentProvider.ratio = None
if hasattr(curTorrentProvider, 'digest'): if hasattr(curTorrentProvider, 'digest'):
try: try:
curTorrentProvider.digest = str(kwargs[curTorrentProvider.getID() + '_digest']).strip() curTorrentProvider.digest = str(kwargs[curTorrentProvider.get_id() + '_digest']).strip()
except: except:
curTorrentProvider.digest = None curTorrentProvider.digest = None
if hasattr(curTorrentProvider, 'hash'): if hasattr(curTorrentProvider, 'hash'):
try: try:
key = str(kwargs[curTorrentProvider.getID() + '_hash']).strip() key = str(kwargs[curTorrentProvider.get_id() + '_hash']).strip()
if not starify(key, True): if not starify(key, True):
curTorrentProvider.hash = key curTorrentProvider.hash = key
except: except:
@ -4211,7 +4211,7 @@ class ConfigProviders(Config):
if hasattr(curTorrentProvider, 'api_key'): if hasattr(curTorrentProvider, 'api_key'):
try: try:
key = str(kwargs[curTorrentProvider.getID() + '_api_key']).strip() key = str(kwargs[curTorrentProvider.get_id() + '_api_key']).strip()
if not starify(key, True): if not starify(key, True):
curTorrentProvider.api_key = key curTorrentProvider.api_key = key
except: except:
@ -4219,13 +4219,13 @@ class ConfigProviders(Config):
if hasattr(curTorrentProvider, 'username'): if hasattr(curTorrentProvider, 'username'):
try: try:
curTorrentProvider.username = str(kwargs[curTorrentProvider.getID() + '_username']).strip() curTorrentProvider.username = str(kwargs[curTorrentProvider.get_id() + '_username']).strip()
except: except:
curTorrentProvider.username = None curTorrentProvider.username = None
if hasattr(curTorrentProvider, 'password'): if hasattr(curTorrentProvider, 'password'):
try: try:
key = str(kwargs[curTorrentProvider.getID() + '_password']).strip() key = str(kwargs[curTorrentProvider.get_id() + '_password']).strip()
if set('*') != set(key): if set('*') != set(key):
curTorrentProvider.password = key curTorrentProvider.password = key
except: except:
@ -4233,7 +4233,7 @@ class ConfigProviders(Config):
if hasattr(curTorrentProvider, 'passkey'): if hasattr(curTorrentProvider, 'passkey'):
try: try:
key = str(kwargs[curTorrentProvider.getID() + '_passkey']).strip() key = str(kwargs[curTorrentProvider.get_id() + '_passkey']).strip()
if not starify(key, True): if not starify(key, True):
curTorrentProvider.passkey = key curTorrentProvider.passkey = key
except: except:
@ -4242,54 +4242,54 @@ class ConfigProviders(Config):
if hasattr(curTorrentProvider, 'confirmed'): if hasattr(curTorrentProvider, 'confirmed'):
try: try:
curTorrentProvider.confirmed = config.checkbox_to_value( curTorrentProvider.confirmed = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_confirmed']) kwargs[curTorrentProvider.get_id() + '_confirmed'])
except: except:
curTorrentProvider.confirmed = 0 curTorrentProvider.confirmed = 0
if hasattr(curTorrentProvider, 'proxy'): if hasattr(curTorrentProvider, 'proxy'):
try: try:
curTorrentProvider.proxy.enabled = config.checkbox_to_value( curTorrentProvider.proxy.enabled = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_proxy']) kwargs[curTorrentProvider.get_id() + '_proxy'])
except: except:
curTorrentProvider.proxy.enabled = 0 curTorrentProvider.proxy.enabled = 0
if hasattr(curTorrentProvider.proxy, 'url'): if hasattr(curTorrentProvider.proxy, 'url'):
try: try:
curTorrentProvider.proxy.url = str(kwargs[curTorrentProvider.getID() + '_proxy_url']).strip() curTorrentProvider.proxy.url = str(kwargs[curTorrentProvider.get_id() + '_proxy_url']).strip()
except: except:
curTorrentProvider.proxy.url = None curTorrentProvider.proxy.url = None
if hasattr(curTorrentProvider, 'freeleech'): if hasattr(curTorrentProvider, 'freeleech'):
try: try:
curTorrentProvider.freeleech = config.checkbox_to_value( curTorrentProvider.freeleech = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_freeleech']) kwargs[curTorrentProvider.get_id() + '_freeleech'])
except: except:
curTorrentProvider.freeleech = 0 curTorrentProvider.freeleech = 0
if hasattr(curTorrentProvider, 'search_mode'): if hasattr(curTorrentProvider, 'search_mode'):
try: try:
curTorrentProvider.search_mode = str(kwargs[curTorrentProvider.getID() + '_search_mode']).strip() curTorrentProvider.search_mode = str(kwargs[curTorrentProvider.get_id() + '_search_mode']).strip()
except: except:
curTorrentProvider.search_mode = 'eponly' curTorrentProvider.search_mode = 'eponly'
if hasattr(curTorrentProvider, 'search_fallback'): if hasattr(curTorrentProvider, 'search_fallback'):
try: try:
curTorrentProvider.search_fallback = config.checkbox_to_value( curTorrentProvider.search_fallback = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_search_fallback']) kwargs[curTorrentProvider.get_id() + '_search_fallback'])
except: except:
curTorrentProvider.search_fallback = 0 # these exceptions are catching unselected checkboxes curTorrentProvider.search_fallback = 0 # these exceptions are catching unselected checkboxes
if hasattr(curTorrentProvider, 'enable_recentsearch'): if hasattr(curTorrentProvider, 'enable_recentsearch'):
try: try:
curTorrentProvider.enable_recentsearch = config.checkbox_to_value( curTorrentProvider.enable_recentsearch = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_enable_recentsearch']) kwargs[curTorrentProvider.get_id() + '_enable_recentsearch'])
except: except:
curTorrentProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes curTorrentProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes
if hasattr(curTorrentProvider, 'enable_backlog'): if hasattr(curTorrentProvider, 'enable_backlog'):
try: try:
curTorrentProvider.enable_backlog = config.checkbox_to_value( curTorrentProvider.enable_backlog = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_enable_backlog']) kwargs[curTorrentProvider.get_id() + '_enable_backlog'])
except: except:
curTorrentProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes curTorrentProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
@ -4298,7 +4298,7 @@ class ConfigProviders(Config):
if hasattr(curNzbProvider, 'api_key'): if hasattr(curNzbProvider, 'api_key'):
try: try:
key = str(kwargs[curNzbProvider.getID() + '_api_key']).strip() key = str(kwargs[curNzbProvider.get_id() + '_api_key']).strip()
if not starify(key, True): if not starify(key, True):
curNzbProvider.api_key = key curNzbProvider.api_key = key
except: except:
@ -4306,34 +4306,34 @@ class ConfigProviders(Config):
if hasattr(curNzbProvider, 'username'): if hasattr(curNzbProvider, 'username'):
try: try:
curNzbProvider.username = str(kwargs[curNzbProvider.getID() + '_username']).strip() curNzbProvider.username = str(kwargs[curNzbProvider.get_id() + '_username']).strip()
except: except:
curNzbProvider.username = None curNzbProvider.username = None
if hasattr(curNzbProvider, 'search_mode'): if hasattr(curNzbProvider, 'search_mode'):
try: try:
curNzbProvider.search_mode = str(kwargs[curNzbProvider.getID() + '_search_mode']).strip() curNzbProvider.search_mode = str(kwargs[curNzbProvider.get_id() + '_search_mode']).strip()
except: except:
curNzbProvider.search_mode = 'eponly' curNzbProvider.search_mode = 'eponly'
if hasattr(curNzbProvider, 'search_fallback'): if hasattr(curNzbProvider, 'search_fallback'):
try: try:
curNzbProvider.search_fallback = config.checkbox_to_value( curNzbProvider.search_fallback = config.checkbox_to_value(
kwargs[curNzbProvider.getID() + '_search_fallback']) kwargs[curNzbProvider.get_id() + '_search_fallback'])
except: except:
curNzbProvider.search_fallback = 0 # these exceptions are actually catching unselected checkboxes curNzbProvider.search_fallback = 0 # these exceptions are actually catching unselected checkboxes
if hasattr(curNzbProvider, 'enable_recentsearch'): if hasattr(curNzbProvider, 'enable_recentsearch'):
try: try:
curNzbProvider.enable_recentsearch = config.checkbox_to_value( curNzbProvider.enable_recentsearch = config.checkbox_to_value(
kwargs[curNzbProvider.getID() + '_enable_recentsearch']) kwargs[curNzbProvider.get_id() + '_enable_recentsearch'])
except: except:
curNzbProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes curNzbProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes
if hasattr(curNzbProvider, 'enable_backlog'): if hasattr(curNzbProvider, 'enable_backlog'):
try: try:
curNzbProvider.enable_backlog = config.checkbox_to_value( curNzbProvider.enable_backlog = config.checkbox_to_value(
kwargs[curNzbProvider.getID() + '_enable_backlog']) kwargs[curNzbProvider.get_id() + '_enable_backlog'])
except: except:
curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
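Every provider setting in the save handler arrives as a form field named after the provider id plus a suffix, hence the repeated kwargs[provider.get_id() + '_minseed'] lookups wrapped in try/except. A small helper showing that lookup with a default (the helper and sample field names are invented, not part of the change):

def provider_setting(kwargs, provider_id, suffix, default=None, cast=str):
    # Fetch '<provider_id>_<suffix>' from the submitted form, falling back
    # to a default when the field is absent or unparsable.
    try:
        return cast(str(kwargs[provider_id + '_' + suffix]).strip())
    except (KeyError, ValueError):
        return default


form = {'example_provider_minseed': ' 5 ', 'example_provider_ratio': '1.5'}
print(provider_setting(form, 'example_provider', 'minseed', 0, int))        # 5
print(provider_setting(form, 'example_provider', 'search_mode', 'eponly'))  # eponly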
View file
@ -59,7 +59,7 @@ class SearchTest(test.SickbeardTestDBCase):
def __init__(self, something): def __init__(self, something):
for provider in sickbeard.providers.sortedProviderList(): for provider in sickbeard.providers.sortedProviderList():
provider.getURL = self._fake_getURL provider.get_url = self._fake_getURL
#provider.isActive = self._fake_isActive #provider.isActive = self._fake_isActive
super(SearchTest, self).__init__(something) super(SearchTest, self).__init__(something)
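The test setup swaps each provider's renamed get_url for a stub by assigning straight over the attribute, the usual monkeypatch. Reduced to its essentials (the class and return values are invented):

class FakeProvider(object):
    def get_url(self, url):
        return '<html>real network response</html>'


def _fake_get_url(url):
    return '<html>canned test response</html>'

provider = FakeProvider()
provider.get_url = _fake_get_url                    # instance attribute shadows the method
print(provider.get_url('http://example.invalid/'))  # canned test response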