Merge pull request #549 from JackDandy/feature/ChangeProviders
Change refactor and tidy providers.
CHANGES.md (26 changed lines)
@@ -42,8 +42,12 @@
 * Change default de-referrer url to blank
 * Change javascript urls in templates to allow proper caching
 * Change downloads to prevent cache misfiring with "Result is not a valid torrent file"
+* Add BitMeTV torrent provider
 * Add Torrenting provider
 * Add FunFile torrent provider
+* Add TVChaosUK torrent provider
+* Add HD-Space torrent provider
+* Add Shazbat torrent provider
 * Remove unnecessary call to indexers during nameparsing
 * Change disable ToTV due to non-deletable yet reported hacker BTC inbox scam and also little to no new content listings
 * Fix Episode View KeyError: 'state-title' failure for shows without a runtime
@@ -53,8 +57,7 @@
 * Fix add existing shows from folders that contain a plus char
 * Fix post process issue where items in history were processed out of turn
 * Change increase frequency of updating show data
-* Remove FreshOnTV (TvT) torrent provider
+* Remove Animenzb provider
-* Remove Strike torrent provider
 * Change increase the scope and number of non release group text that is identified and removed
 * Add a general config setting to allow adding incomplete show data
 * Change to throttle connection rate on thread initiation for adba library
@@ -74,12 +77,21 @@
 * Add IMDb Popular to Add Show page
 * Add version to anime renaming pattern
 * Add Code Climate configuration files
-* Change to move init-scripts to single folder
+* Change move init-scripts to single folder
 * Change sickbeard variables to sickgear variables in init-scripts
 * Change improve the use of multiple plex servers
-* Change to move JS code out of home template and into dedicated file
+* Change move JS code out of home template and into dedicated file
-* Change to remove branch from window title
+* Change remove branch from window title
-* Change to move JS code out of inc_top template and into dedicated file
+* Change move JS code out of inc_top template and into dedicated file
+* Change cleanup torrent providers
+* Change utilise tvdbid for searching usenet providers
+* Add setting to provider BTN to Reject Blu-ray M2TS releases
+* Remove jsonrpclib library
+* Change consolidate global and per show ignore and require words functions
+* Change "Require word" title and notes on Config Search page to properly describe its functional logic
+* Add regular expression capability to ignore and require words by starting wordlist with "regex:"
+* Add list shows with custom ignore and require words under the global counterparts on the Search Settings page
+* Fix failure to search for more than one selected wanted episode
 * Add notice for users with Python 2.7.8 or below to update to latest Python

 [develop changelog]
@@ -87,6 +99,8 @@
 * Add ability to parse command line output from unix unrar version 4 and below
 * Fix show search box on non-poster show list views
 * Fix removal of non-release groups such that anime qualities are not trimmed from name
+* Change readd Strike torrent provider
+* Change readd FreshOnTV (TvT) torrent provider


 ### 0.10.0 (2015-08-06 11:05:00 UTC)
@@ -340,7 +340,7 @@ class SickGear(object):
 logger.ERROR)
 if sickbeard.LAUNCH_BROWSER and not self.runAsDaemon:
 logger.log(u'Launching browser and exiting', logger.ERROR)
-sickbeard.launchBrowser(self.startPort)
+sickbeard.launch_browser(self.startPort)
 os._exit(1)

 # Check if we need to perform a restore first
@@ -377,7 +377,7 @@ class SickGear(object):

 # Launch browser
 if sickbeard.LAUNCH_BROWSER and not (self.noLaunch or self.runAsDaemon):
-sickbeard.launchBrowser(self.startPort)
+sickbeard.launch_browser(self.startPort)

 # main loop
 while True:
@@ -488,7 +488,7 @@ class SickGear(object):
 sickbeard.halt()

 # save all shows to DB
-sickbeard.saveAll()
+sickbeard.save_all()

 # shutdown web server
 if self.webserver:
BIN gui/slick/images/providers/bitmetv.png (new file, 1.3 KiB)
BIN gui/slick/images/providers/freshontv.png (new file, 3.5 KiB)
BIN gui/slick/images/providers/hdspace.png (new file, 752 B)
BIN gui/slick/images/providers/shazbat.png (new file, 557 B)
BIN gui/slick/images/providers/strike.png (new file, 417 B)
BIN gui/slick/images/providers/tvchaosuk.png (new file, 1,002 B)
@@ -11,9 +11,9 @@
 #import os.path
 #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')

 #if $varExists('header')
 <h1 class="header">$header</h1>
 #else
 <h1 class="title">$title</h1>
 #end if

@@ -281,7 +281,7 @@
 <span class="component-desc">
 #set $field_name = curNzbProvider.get_id() + '_api_key'
 <input type="text" name="$field_name" value="<%= starify(curNzbProvider.api_key) %>" class="form-control input-sm input350" />
-#if callable(getattr(curNzbProvider, 'ui_string'))
+#if callable(getattr(curNzbProvider, 'ui_string', None))
 <div class="clear-left"><p>${curNzbProvider.ui_string($field_name)}</p></div>
 #end if
 </span>
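The `getattr` default added above matters because `callable(getattr(obj, 'ui_string'))` raises `AttributeError` for providers that define no `ui_string` at all; passing `None` as the default makes the check safe. A minimal Python illustration (the provider classes here are hypothetical stand-ins, not SickGear's real classes):

class ProviderWithHint(object):
    def ui_string(self, field_name):
        # descriptive hint shown under the provider's api_key field
        return 'hint for %s' % field_name

class PlainProvider(object):
    pass  # no ui_string attribute at all

for provider in (ProviderWithHint(), PlainProvider()):
    # getattr(provider, 'ui_string') would raise AttributeError for PlainProvider;
    # the None default turns the missing attribute into a harmless falsey value
    if callable(getattr(provider, 'ui_string', None)):
        print(provider.ui_string('example_api_key'))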
@@ -347,6 +347,17 @@
 ##
 #for $curTorrentProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.TORRENT]:
 <div class="providerDiv" id="${curTorrentProvider.get_id()}Div">
+#if callable(getattr(curTorrentProvider, 'ui_string', None))
+#set $field_name = curTorrentProvider.get_id() + '_tip'
+#set $tip_text = curTorrentProvider.ui_string($field_name)
+#if $tip_text
+<div class="field-pair">
+<span class="component-desc" style="margin:0;width:100%">
+<div class="clear-left"><p class="grey-text"><span class="red-text">Important! ${curTorrentProvider.name}</span> $tip_text</p></div>
+</span>
+</div>
+#end if
+#end if
 #if $hasattr($curTorrentProvider, 'api_key'):
 <div class="field-pair">
 <label for="${curTorrentProvider.get_id()}_api_key">
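The new block above asks each torrent provider's `ui_string` for a value keyed `<provider_id>_tip` and, if one exists, shows it as an "Important!" note at the top of that provider's settings. A rough sketch of the provider-side convention this implies (a hypothetical provider class for illustration, not the actual SickGear code):

class ExampleTorrentProvider(object):
    name = 'ExampleTracker'

    def get_id(self):
        return 'exampletracker'

    def ui_string(self, key=None):
        # the template looks up '<provider_id>_tip'; an empty result hides the note
        return {'exampletracker_tip': 'cookies must be enabled on the site'}.get(key, '')

provider = ExampleTorrentProvider()
tip_text = provider.ui_string(provider.get_id() + '_tip')
if tip_text:
    print('Important! %s %s' % (provider.name, tip_text))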
@@ -364,7 +375,7 @@
 <span class="component-desc">
 #set $field_name = curTorrentProvider.get_id() + '_digest'
 <input type="text" name="$field_name" id="$field_name" value="<%= starify(curTorrentProvider.digest) %>" class="form-control input-sm input350" />
-#if callable(getattr(curTorrentProvider, 'ui_string'))
+#if callable(getattr(curTorrentProvider, 'ui_string', None))
 <div class="clear-left"><p>${curTorrentProvider.ui_string($field_name)}</p></div>
 #end if
 </span>
@@ -396,7 +407,7 @@
 <label for="${curTorrentProvider.get_id()}_password">
 <span class="component-title">Password:</span>
 <span class="component-desc">
-<input type="password" name="${curTorrentProvider.get_id()}_password" id="${curTorrentProvider.get_id()}_password" value="#echo '*' * len($curTorrentProvider.password)#" class="form-control input-sm input350" />
+<input type="password" name="${curTorrentProvider.get_id()}_password" id="${curTorrentProvider.get_id()}_password" value="#echo $curTorrentProvider.password and '*' * len($curTorrentProvider.password) or ''#" class="form-control input-sm input350" />
 </span>
 </label>
 </div>
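The new value expression avoids an error when a provider has no stored password: `len(None)` raises `TypeError`, while the `and/or` form falls back to an empty string. A quick sketch of the same idiom in plain Python (illustrative only):

def mask(password):
    # '*' * len(password) alone raises TypeError when password is None;
    # the and/or chain yields '' for None or empty passwords instead
    return password and '*' * len(password) or ''

print(mask('hunter2'))  # *******
print(mask(None))       # prints an empty string
print(mask(''))         # prints an empty string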
@@ -419,7 +430,7 @@
 <span class="component-desc">
 <input type="number" name="${curTorrentProvider.get_id()}_ratio" id="${curTorrentProvider.get_id()}_ratio" value="$curTorrentProvider._seed_ratio" class="form-control input-sm input75" />
 <p>this ratio is requested of each item sent to $torrent_method_text[$sickbeard.TORRENT_METHOD]</p>
-<div class="clear-left"><p>(set -1 to seed forever, or leave blank for the $torrent_method_text[$sickbeard.TORRENT_METHOD] setting)</p></div>
+<div class="clear-left"><p>(#if 'Transmission' in $torrent_method_text[$sickbeard.TORRENT_METHOD]#set -1 to seed forever, or #end if#leave blank for the $torrent_method_text[$sickbeard.TORRENT_METHOD] setting)</p></div>
 </span>
 </label>
 </div>
@@ -507,6 +518,17 @@
 </label>
 </div>
 #end if
+#if $hasattr($curTorrentProvider, 'reject_m2ts'):
+<div class="field-pair">
+<label for="${curTorrentProvider.get_id()}_reject_m2ts">
+<span class="component-title">Reject Blu-ray M2TS releases</span>
+<span class="component-desc">
+<input type="checkbox" name="${curTorrentProvider.get_id()}_reject_m2ts" id="${curTorrentProvider.get_id()}_reject_m2ts" <%= html_checked if curTorrentProvider.reject_m2ts else '' %>/>
+<p>enable to ignore Blu-ray MPEG-2 Transport Stream container releases</p>
+</span>
+</label>
+</div>
+#end if
 #if $hasattr($curTorrentProvider, 'enable_recentsearch') and $curTorrentProvider.supportsBacklog:
 <div class="field-pair">
 <label for="${curTorrentProvider.get_id()}_enable_recentsearch">
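When the checkbox added above is enabled, the provider is expected to drop results released in the Blu-ray M2TS container. A minimal sketch of name-based filtering along those lines (the regex and the `reject_m2ts` handling are illustrative assumptions, not the provider's actual code):

import re

M2TS_RE = re.compile(r'\bm2ts\b', re.IGNORECASE)

def filter_results(titles, reject_m2ts=True):
    # keep every release name unless M2TS rejection is on and the name mentions m2ts
    return [t for t in titles if not (reject_m2ts and M2TS_RE.search(t))]

titles = ['Show.S01E01.1080p.BluRay.M2TS.x264', 'Show.S01E01.720p.HDTV.x264']
print(filter_results(titles))  # only the HDTV release survives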
@@ -725,4 +747,4 @@
 //-->
 </script>

 #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')
@@ -113,18 +113,42 @@
 <label>
 <span class="component-title">Ignore result with any word</span>
 <span class="component-desc">
-<input type="text" name="ignore_words" value="$sickbeard.IGNORE_WORDS" class="form-control input-sm input350">
+<input type="text" name="ignore_words" value="$sickbeard.IGNORE_WORDS" class="form-control input-sm input350"><p>(opt: start "regex:")</p>
 <p class="clear-left note">ignore search result <em class="grey-text">if its title contains any</em> of these comma seperated words</p>
 </span>
+<span class="component-title">Shows with custom ignores</span>
+<span class="component-desc">
+#set $shows = []
+#for $show in $using_rls_ignore_words
+#set void = $shows.append('<a href="%s/home/editShow?show=%s" style="vertical-align:middle">%s</a>' % ($sbRoot, $show[0], $show[1]))
+#end for
+#if len($using_rls_ignore_words)
+<p style="line-height:1.2em;margin-top:6px">#echo ', '.join($shows)#</p>
+#else
+<p style="line-height:1.2em;margin-top:7px">...will list here when in use</p>
+#end if
+</span>
 </label>
 </div>

 <div class="field-pair">
 <label>
-<span class="component-title">Require at least one word</span>
+<span class="component-title">Require all these words</span>
 <span class="component-desc">
-<input type="text" name="require_words" value="$sickbeard.REQUIRE_WORDS" class="form-control input-sm input350">
+<input type="text" name="require_words" value="$sickbeard.REQUIRE_WORDS" class="form-control input-sm input350"><p>(opt: start "regex:")</p>
-<p class="clear-left note">ignore search result <em class="grey-text">unless its title contains one</em> of these comma seperated words</p>
+<p class="clear-left note">ignore search result <em class="grey-text">unless its title contains all</em> of these comma seperated words</p>
+</span>
+<span class="component-title">Shows with custom requires</span>
+<span class="component-desc">
+#set $shows = []
+#for $show in $using_rls_require_words
+#set void = $shows.append('<a href="%s/home/editShow?show=%s" style="vertical-align:middle">%s</a>' % ($sbRoot, $show[0], $show[1]))
+#end for
+#if len($using_rls_require_words)
+<p style="line-height:1.2em;margin-top:6px">#echo ', '.join($shows)#</p>
+#else
+<p style="line-height:1.2em;margin-top:7px">...will list here when in use</p>
+#end if
 </span>
 </label>
 </div>
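As the "(opt: start "regex:")" hints above indicate, an ignore or require wordlist can now opt into regular expressions by starting with "regex:". A minimal sketch of how such a list could be split and matched against a result title (a hedged illustration only; `matches_wordlist` and its exact semantics are assumptions, not SickGear's implementation):

import re

def matches_wordlist(title, wordlist):
    # a leading "regex:" switches the comma separated terms to regular expressions
    use_regex = wordlist.lower().startswith('regex:')
    terms = [t.strip() for t in wordlist.split(':', 1)[-1].split(',') if t.strip()]
    if use_regex:
        return any(re.search(term, title, re.IGNORECASE) for term in terms)
    return any(term.lower() in title.lower() for term in terms)

title = 'Show.S01E01.720p.HDTV.x264-GRP'
print(matches_wordlist(title, '1080p, 720p'))           # True, plain word match
print(matches_wordlist(title, r'regex: s\d{2}e\d{2}'))  # True, regex match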
@@ -125,8 +125,8 @@
 <span class="component-title">Ignore result with any word</span>
 <span class="component-desc">
 <input type="text" name="rls_ignore_words" id="rls_ignore_words" value="$show.rls_ignore_words" class="form-control form-control-inline input-sm input350">
-<p>e.g. [word1,word2, ... ,word_n]</p>
+<p>e.g. [[regex:]word1, word2, ..., word_n, regex_n]</p>
-<p class="note">ignore search result <em class="grey-text">if its title contains any</em> of these comma seperated words</p>
+<p class="note">ignore search result <em class="grey-text">if its title contains any</em> of these comma seperated words or regular expressions</p>
 </span>
 </label>
 </div>
@@ -136,8 +136,8 @@
 <span class="component-title">Require at least one word</span>
 <span class="component-desc">
 <input type="text" name="rls_require_words" id="rls_require_words" value="$show.rls_require_words" class="form-control form-control-inline input-sm input350">
-<p>e.g. [word1,word2, ... ,word_n]</p>
+<p>e.g. [[regex:]word1, word2, ..., word_n, regex_n]</p>
-<p class="note">ignore search result <em class="grey-text">unless its title contains one</em> of these comma seperated words</p>
+<p class="note">ignore search result <em class="grey-text">unless its title contains one</em> of these comma seperated words or regular expressions</p>
 </span>
 </label>
 </div>
@@ -276,7 +276,7 @@
 var scene_ex = \$('#SceneName').val()
 var option = \$('<option>')
 all_exceptions = []

 \$('#exceptions_list option').each ( function() {
 all_exceptions.push( \$(this).val() )
 });
@@ -295,7 +295,7 @@

 \$('#removeSceneName').click(function() {
 \$('#exceptions_list option:selected').remove();

 \$(this).toggle_SceneException()
 });

@@ -317,4 +317,4 @@
 </script>
 </div>

 #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')
@@ -3,8 +3,8 @@
 #from lib import subliminal
 #from sickbeard import common
 ##
-#set global $title = 'Episode Overview'
+#set global $title = 'Missing Subtitles'
-#set global $header = 'Episode Overview'
+#set global $header = 'Missing Subtitles'
 #set global $sbPath = '..'
 #set global $topmenu = 'manage'
 ##
@@ -12,9 +12,9 @@
 #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')

 <div id="content960">
 #if $varExists('header')
 <h1 class="header">$header</h1>
 #else
 <h1 class="title">$title</h1>
 #end if
 ##
@@ -64,4 +64,4 @@
 </form>
 #end if
 </div>
 #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')
@@ -1,229 +0,0 @@
|
||||||
import jsonrpclib
|
|
||||||
from jsonrpclib import Fault
|
|
||||||
from jsonrpclib.jsonrpc import USE_UNIX_SOCKETS
|
|
||||||
import SimpleXMLRPCServer
|
|
||||||
import SocketServer
|
|
||||||
import socket
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import types
|
|
||||||
import traceback
|
|
||||||
import sys
|
|
||||||
try:
|
|
||||||
import fcntl
|
|
||||||
except ImportError:
|
|
||||||
# For Windows
|
|
||||||
fcntl = None
|
|
||||||
|
|
||||||
def get_version(request):
|
|
||||||
# must be a dict
|
|
||||||
if 'jsonrpc' in request.keys():
|
|
||||||
return 2.0
|
|
||||||
if 'id' in request.keys():
|
|
||||||
return 1.0
|
|
||||||
return None
|
|
||||||
|
|
||||||
def validate_request(request):
|
|
||||||
if type(request) is not types.DictType:
|
|
||||||
fault = Fault(
|
|
||||||
-32600, 'Request must be {}, not %s.' % type(request)
|
|
||||||
)
|
|
||||||
return fault
|
|
||||||
rpcid = request.get('id', None)
|
|
||||||
version = get_version(request)
|
|
||||||
if not version:
|
|
||||||
fault = Fault(-32600, 'Request %s invalid.' % request, rpcid=rpcid)
|
|
||||||
return fault
|
|
||||||
request.setdefault('params', [])
|
|
||||||
method = request.get('method', None)
|
|
||||||
params = request.get('params')
|
|
||||||
param_types = (types.ListType, types.DictType, types.TupleType)
|
|
||||||
if not method or type(method) not in types.StringTypes or \
|
|
||||||
type(params) not in param_types:
|
|
||||||
fault = Fault(
|
|
||||||
-32600, 'Invalid request parameters or method.', rpcid=rpcid
|
|
||||||
)
|
|
||||||
return fault
|
|
||||||
return True
|
|
||||||
|
|
||||||
class SimpleJSONRPCDispatcher(SimpleXMLRPCServer.SimpleXMLRPCDispatcher):
|
|
||||||
|
|
||||||
def __init__(self, encoding=None):
|
|
||||||
SimpleXMLRPCServer.SimpleXMLRPCDispatcher.__init__(self,
|
|
||||||
allow_none=True,
|
|
||||||
encoding=encoding)
|
|
||||||
|
|
||||||
def _marshaled_dispatch(self, data, dispatch_method = None):
|
|
||||||
response = None
|
|
||||||
try:
|
|
||||||
request = jsonrpclib.loads(data)
|
|
||||||
except Exception, e:
|
|
||||||
fault = Fault(-32700, 'Request %s invalid. (%s)' % (data, e))
|
|
||||||
response = fault.response()
|
|
||||||
return response
|
|
||||||
if not request:
|
|
||||||
fault = Fault(-32600, 'Request invalid -- no request data.')
|
|
||||||
return fault.response()
|
|
||||||
if type(request) is types.ListType:
|
|
||||||
# This SHOULD be a batch, by spec
|
|
||||||
responses = []
|
|
||||||
for req_entry in request:
|
|
||||||
result = validate_request(req_entry)
|
|
||||||
if type(result) is Fault:
|
|
||||||
responses.append(result.response())
|
|
||||||
continue
|
|
||||||
resp_entry = self._marshaled_single_dispatch(req_entry)
|
|
||||||
if resp_entry is not None:
|
|
||||||
responses.append(resp_entry)
|
|
||||||
if len(responses) > 0:
|
|
||||||
response = '[%s]' % ','.join(responses)
|
|
||||||
else:
|
|
||||||
response = ''
|
|
||||||
else:
|
|
||||||
result = validate_request(request)
|
|
||||||
if type(result) is Fault:
|
|
||||||
return result.response()
|
|
||||||
response = self._marshaled_single_dispatch(request)
|
|
||||||
return response
|
|
||||||
|
|
||||||
def _marshaled_single_dispatch(self, request):
|
|
||||||
# TODO - Use the multiprocessing and skip the response if
|
|
||||||
# it is a notification
|
|
||||||
# Put in support for custom dispatcher here
|
|
||||||
# (See SimpleXMLRPCServer._marshaled_dispatch)
|
|
||||||
method = request.get('method')
|
|
||||||
params = request.get('params')
|
|
||||||
try:
|
|
||||||
response = self._dispatch(method, params)
|
|
||||||
except:
|
|
||||||
exc_type, exc_value, exc_tb = sys.exc_info()
|
|
||||||
fault = Fault(-32603, '%s:%s' % (exc_type, exc_value))
|
|
||||||
return fault.response()
|
|
||||||
if 'id' not in request.keys() or request['id'] == None:
|
|
||||||
# It's a notification
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
response = jsonrpclib.dumps(response,
|
|
||||||
methodresponse=True,
|
|
||||||
rpcid=request['id']
|
|
||||||
)
|
|
||||||
return response
|
|
||||||
except:
|
|
||||||
exc_type, exc_value, exc_tb = sys.exc_info()
|
|
||||||
fault = Fault(-32603, '%s:%s' % (exc_type, exc_value))
|
|
||||||
return fault.response()
|
|
||||||
|
|
||||||
def _dispatch(self, method, params):
|
|
||||||
func = None
|
|
||||||
try:
|
|
||||||
func = self.funcs[method]
|
|
||||||
except KeyError:
|
|
||||||
if self.instance is not None:
|
|
||||||
if hasattr(self.instance, '_dispatch'):
|
|
||||||
return self.instance._dispatch(method, params)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
func = SimpleXMLRPCServer.resolve_dotted_attribute(
|
|
||||||
self.instance,
|
|
||||||
method,
|
|
||||||
True
|
|
||||||
)
|
|
||||||
except AttributeError:
|
|
||||||
pass
|
|
||||||
if func is not None:
|
|
||||||
try:
|
|
||||||
if type(params) is types.ListType:
|
|
||||||
response = func(*params)
|
|
||||||
else:
|
|
||||||
response = func(**params)
|
|
||||||
return response
|
|
||||||
except TypeError:
|
|
||||||
return Fault(-32602, 'Invalid parameters.')
|
|
||||||
except:
|
|
||||||
err_lines = traceback.format_exc().splitlines()
|
|
||||||
trace_string = '%s | %s' % (err_lines[-3], err_lines[-1])
|
|
||||||
fault = jsonrpclib.Fault(-32603, 'Server error: %s' %
|
|
||||||
trace_string)
|
|
||||||
return fault
|
|
||||||
else:
|
|
||||||
return Fault(-32601, 'Method %s not supported.' % method)
|
|
||||||
|
|
||||||
class SimpleJSONRPCRequestHandler(
|
|
||||||
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
|
|
||||||
|
|
||||||
def do_POST(self):
|
|
||||||
if not self.is_rpc_path_valid():
|
|
||||||
self.report_404()
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
max_chunk_size = 10*1024*1024
|
|
||||||
size_remaining = int(self.headers["content-length"])
|
|
||||||
L = []
|
|
||||||
while size_remaining:
|
|
||||||
chunk_size = min(size_remaining, max_chunk_size)
|
|
||||||
L.append(self.rfile.read(chunk_size))
|
|
||||||
size_remaining -= len(L[-1])
|
|
||||||
data = ''.join(L)
|
|
||||||
response = self.server._marshaled_dispatch(data)
|
|
||||||
self.send_response(200)
|
|
||||||
except Exception, e:
|
|
||||||
self.send_response(500)
|
|
||||||
err_lines = traceback.format_exc().splitlines()
|
|
||||||
trace_string = '%s | %s' % (err_lines[-3], err_lines[-1])
|
|
||||||
fault = jsonrpclib.Fault(-32603, 'Server error: %s' % trace_string)
|
|
||||||
response = fault.response()
|
|
||||||
if response == None:
|
|
||||||
response = ''
|
|
||||||
self.send_header("Content-type", "application/json-rpc")
|
|
||||||
self.send_header("Content-length", str(len(response)))
|
|
||||||
self.end_headers()
|
|
||||||
self.wfile.write(response)
|
|
||||||
self.wfile.flush()
|
|
||||||
self.connection.shutdown(1)
|
|
||||||
|
|
||||||
class SimpleJSONRPCServer(SocketServer.TCPServer, SimpleJSONRPCDispatcher):
|
|
||||||
|
|
||||||
allow_reuse_address = True
|
|
||||||
|
|
||||||
def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler,
|
|
||||||
logRequests=True, encoding=None, bind_and_activate=True,
|
|
||||||
address_family=socket.AF_INET):
|
|
||||||
self.logRequests = logRequests
|
|
||||||
SimpleJSONRPCDispatcher.__init__(self, encoding)
|
|
||||||
# TCPServer.__init__ has an extra parameter on 2.6+, so
|
|
||||||
# check Python version and decide on how to call it
|
|
||||||
vi = sys.version_info
|
|
||||||
self.address_family = address_family
|
|
||||||
if USE_UNIX_SOCKETS and address_family == socket.AF_UNIX:
|
|
||||||
# Unix sockets can't be bound if they already exist in the
|
|
||||||
# filesystem. The convention of e.g. X11 is to unlink
|
|
||||||
# before binding again.
|
|
||||||
if os.path.exists(addr):
|
|
||||||
try:
|
|
||||||
os.unlink(addr)
|
|
||||||
except OSError:
|
|
||||||
logging.warning("Could not unlink socket %s", addr)
|
|
||||||
# if python 2.5 and lower
|
|
||||||
if vi[0] < 3 and vi[1] < 6:
|
|
||||||
SocketServer.TCPServer.__init__(self, addr, requestHandler)
|
|
||||||
else:
|
|
||||||
SocketServer.TCPServer.__init__(self, addr, requestHandler,
|
|
||||||
bind_and_activate)
|
|
||||||
if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
|
|
||||||
flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
|
|
||||||
flags |= fcntl.FD_CLOEXEC
|
|
||||||
fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
|
|
||||||
|
|
||||||
class CGIJSONRPCRequestHandler(SimpleJSONRPCDispatcher):
|
|
||||||
|
|
||||||
def __init__(self, encoding=None):
|
|
||||||
SimpleJSONRPCDispatcher.__init__(self, encoding)
|
|
||||||
|
|
||||||
def handle_jsonrpc(self, request_text):
|
|
||||||
response = self._marshaled_dispatch(request_text)
|
|
||||||
print 'Content-Type: application/json-rpc'
|
|
||||||
print 'Content-Length: %d' % len(response)
|
|
||||||
print
|
|
||||||
sys.stdout.write(response)
|
|
||||||
|
|
||||||
handle_xmlrpc = handle_jsonrpc
|
|
|
@@ -1,6 +0,0 @@
-from jsonrpclib.config import Config
-config = Config.instance()
-from jsonrpclib.history import History
-history = History.instance()
-from jsonrpclib.jsonrpc import Server, MultiCall, Fault
-from jsonrpclib.jsonrpc import ProtocolError, loads, dumps
@@ -1,38 +0,0 @@
|
||||||
import sys
|
|
||||||
|
|
||||||
class LocalClasses(dict):
|
|
||||||
def add(self, cls):
|
|
||||||
self[cls.__name__] = cls
|
|
||||||
|
|
||||||
class Config(object):
|
|
||||||
"""
|
|
||||||
This is pretty much used exclusively for the 'jsonclass'
|
|
||||||
functionality... set use_jsonclass to False to turn it off.
|
|
||||||
You can change serialize_method and ignore_attribute, or use
|
|
||||||
the local_classes.add(class) to include "local" classes.
|
|
||||||
"""
|
|
||||||
use_jsonclass = True
|
|
||||||
# Change to False to keep __jsonclass__ entries raw.
|
|
||||||
serialize_method = '_serialize'
|
|
||||||
# The serialize_method should be a string that references the
|
|
||||||
# method on a custom class object which is responsible for
|
|
||||||
# returning a tuple of the constructor arguments and a dict of
|
|
||||||
# attributes.
|
|
||||||
ignore_attribute = '_ignore'
|
|
||||||
# The ignore attribute should be a string that references the
|
|
||||||
# attribute on a custom class object which holds strings and / or
|
|
||||||
# references of the attributes the class translator should ignore.
|
|
||||||
classes = LocalClasses()
|
|
||||||
# The list of classes to use for jsonclass translation.
|
|
||||||
version = 2.0
|
|
||||||
# Version of the JSON-RPC spec to support
|
|
||||||
user_agent = 'jsonrpclib/0.1 (Python %s)' % \
|
|
||||||
'.'.join([str(ver) for ver in sys.version_info[0:3]])
|
|
||||||
# User agent to use for calls.
|
|
||||||
_instance = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def instance(cls):
|
|
||||||
if not cls._instance:
|
|
||||||
cls._instance = cls()
|
|
||||||
return cls._instance
|
|
|
@@ -1,40 +0,0 @@
|
||||||
class History(object):
|
|
||||||
"""
|
|
||||||
This holds all the response and request objects for a
|
|
||||||
session. A server using this should call "clear" after
|
|
||||||
each request cycle in order to keep it from clogging
|
|
||||||
memory.
|
|
||||||
"""
|
|
||||||
requests = []
|
|
||||||
responses = []
|
|
||||||
_instance = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def instance(cls):
|
|
||||||
if not cls._instance:
|
|
||||||
cls._instance = cls()
|
|
||||||
return cls._instance
|
|
||||||
|
|
||||||
def add_response(self, response_obj):
|
|
||||||
self.responses.append(response_obj)
|
|
||||||
|
|
||||||
def add_request(self, request_obj):
|
|
||||||
self.requests.append(request_obj)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def request(self):
|
|
||||||
if len(self.requests) == 0:
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
return self.requests[-1]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def response(self):
|
|
||||||
if len(self.responses) == 0:
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
return self.responses[-1]
|
|
||||||
|
|
||||||
def clear(self):
|
|
||||||
del self.requests[:]
|
|
||||||
del self.responses[:]
|
|
|
@@ -1,152 +0,0 @@
|
||||||
import types
|
|
||||||
import inspect
|
|
||||||
import re
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
from jsonrpclib import config
|
|
||||||
|
|
||||||
iter_types = [
|
|
||||||
types.DictType,
|
|
||||||
types.ListType,
|
|
||||||
types.TupleType
|
|
||||||
]
|
|
||||||
|
|
||||||
string_types = [
|
|
||||||
types.StringType,
|
|
||||||
types.UnicodeType
|
|
||||||
]
|
|
||||||
|
|
||||||
numeric_types = [
|
|
||||||
types.IntType,
|
|
||||||
types.LongType,
|
|
||||||
types.FloatType
|
|
||||||
]
|
|
||||||
|
|
||||||
value_types = [
|
|
||||||
types.BooleanType,
|
|
||||||
types.NoneType
|
|
||||||
]
|
|
||||||
|
|
||||||
supported_types = iter_types+string_types+numeric_types+value_types
|
|
||||||
invalid_module_chars = r'[^a-zA-Z0-9\_\.]'
|
|
||||||
|
|
||||||
class TranslationError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]):
|
|
||||||
if not serialize_method:
|
|
||||||
serialize_method = config.serialize_method
|
|
||||||
if not ignore_attribute:
|
|
||||||
ignore_attribute = config.ignore_attribute
|
|
||||||
obj_type = type(obj)
|
|
||||||
# Parse / return default "types"...
|
|
||||||
if obj_type in numeric_types+string_types+value_types:
|
|
||||||
return obj
|
|
||||||
if obj_type in iter_types:
|
|
||||||
if obj_type in (types.ListType, types.TupleType):
|
|
||||||
new_obj = []
|
|
||||||
for item in obj:
|
|
||||||
new_obj.append(dump(item, serialize_method,
|
|
||||||
ignore_attribute, ignore))
|
|
||||||
if obj_type is types.TupleType:
|
|
||||||
new_obj = tuple(new_obj)
|
|
||||||
return new_obj
|
|
||||||
# It's a dict...
|
|
||||||
else:
|
|
||||||
new_obj = {}
|
|
||||||
for key, value in obj.iteritems():
|
|
||||||
new_obj[key] = dump(value, serialize_method,
|
|
||||||
ignore_attribute, ignore)
|
|
||||||
return new_obj
|
|
||||||
# It's not a standard type, so it needs __jsonclass__
|
|
||||||
module_name = inspect.getmodule(obj).__name__
|
|
||||||
class_name = obj.__class__.__name__
|
|
||||||
json_class = class_name
|
|
||||||
if module_name not in ['', '__main__']:
|
|
||||||
json_class = '%s.%s' % (module_name, json_class)
|
|
||||||
return_obj = {"__jsonclass__":[json_class,]}
|
|
||||||
# If a serialization method is defined..
|
|
||||||
if serialize_method in dir(obj):
|
|
||||||
# Params can be a dict (keyword) or list (positional)
|
|
||||||
# Attrs MUST be a dict.
|
|
||||||
serialize = getattr(obj, serialize_method)
|
|
||||||
params, attrs = serialize()
|
|
||||||
return_obj['__jsonclass__'].append(params)
|
|
||||||
return_obj.update(attrs)
|
|
||||||
return return_obj
|
|
||||||
# Otherwise, try to figure it out
|
|
||||||
# Obviously, we can't assume to know anything about the
|
|
||||||
# parameters passed to __init__
|
|
||||||
return_obj['__jsonclass__'].append([])
|
|
||||||
attrs = {}
|
|
||||||
ignore_list = getattr(obj, ignore_attribute, [])+ignore
|
|
||||||
for attr_name, attr_value in obj.__dict__.iteritems():
|
|
||||||
if type(attr_value) in supported_types and \
|
|
||||||
attr_name not in ignore_list and \
|
|
||||||
attr_value not in ignore_list:
|
|
||||||
attrs[attr_name] = dump(attr_value, serialize_method,
|
|
||||||
ignore_attribute, ignore)
|
|
||||||
return_obj.update(attrs)
|
|
||||||
return return_obj
|
|
||||||
|
|
||||||
def load(obj):
|
|
||||||
if type(obj) in string_types+numeric_types+value_types:
|
|
||||||
return obj
|
|
||||||
if type(obj) is types.ListType:
|
|
||||||
return_list = []
|
|
||||||
for entry in obj:
|
|
||||||
return_list.append(load(entry))
|
|
||||||
return return_list
|
|
||||||
# Othewise, it's a dict type
|
|
||||||
if '__jsonclass__' not in obj.keys():
|
|
||||||
return_dict = {}
|
|
||||||
for key, value in obj.iteritems():
|
|
||||||
new_value = load(value)
|
|
||||||
return_dict[key] = new_value
|
|
||||||
return return_dict
|
|
||||||
# It's a dict, and it's a __jsonclass__
|
|
||||||
orig_module_name = obj['__jsonclass__'][0]
|
|
||||||
params = obj['__jsonclass__'][1]
|
|
||||||
if orig_module_name == '':
|
|
||||||
raise TranslationError('Module name empty.')
|
|
||||||
json_module_clean = re.sub(invalid_module_chars, '', orig_module_name)
|
|
||||||
if json_module_clean != orig_module_name:
|
|
||||||
raise TranslationError('Module name %s has invalid characters.' %
|
|
||||||
orig_module_name)
|
|
||||||
json_module_parts = json_module_clean.split('.')
|
|
||||||
json_class = None
|
|
||||||
if len(json_module_parts) == 1:
|
|
||||||
# Local class name -- probably means it won't work
|
|
||||||
if json_module_parts[0] not in config.classes.keys():
|
|
||||||
raise TranslationError('Unknown class or module %s.' %
|
|
||||||
json_module_parts[0])
|
|
||||||
json_class = config.classes[json_module_parts[0]]
|
|
||||||
else:
|
|
||||||
json_class_name = json_module_parts.pop()
|
|
||||||
json_module_tree = '.'.join(json_module_parts)
|
|
||||||
try:
|
|
||||||
temp_module = __import__(json_module_tree)
|
|
||||||
except ImportError:
|
|
||||||
raise TranslationError('Could not import %s from module %s.' %
|
|
||||||
(json_class_name, json_module_tree))
|
|
||||||
|
|
||||||
# The returned class is the top-level module, not the one we really
|
|
||||||
# want. (E.g., if we import a.b.c, we now have a.) Walk through other
|
|
||||||
# path components to get to b and c.
|
|
||||||
for i in json_module_parts[1:]:
|
|
||||||
temp_module = getattr(temp_module, i)
|
|
||||||
|
|
||||||
json_class = getattr(temp_module, json_class_name)
|
|
||||||
# Creating the object...
|
|
||||||
new_obj = None
|
|
||||||
if type(params) is types.ListType:
|
|
||||||
new_obj = json_class(*params)
|
|
||||||
elif type(params) is types.DictType:
|
|
||||||
new_obj = json_class(**params)
|
|
||||||
else:
|
|
||||||
raise TranslationError('Constructor args must be a dict or list.')
|
|
||||||
for key, value in obj.iteritems():
|
|
||||||
if key == '__jsonclass__':
|
|
||||||
continue
|
|
||||||
setattr(new_obj, key, value)
|
|
||||||
return new_obj
|
|
|
@@ -1,561 +0,0 @@
|
||||||
"""
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
|
|
||||||
============================
|
|
||||||
JSONRPC Library (jsonrpclib)
|
|
||||||
============================
|
|
||||||
|
|
||||||
This library is a JSON-RPC v.2 (proposed) implementation which
|
|
||||||
follows the xmlrpclib API for portability between clients. It
|
|
||||||
uses the same Server / ServerProxy, loads, dumps, etc. syntax,
|
|
||||||
while providing features not present in XML-RPC like:
|
|
||||||
|
|
||||||
* Keyword arguments
|
|
||||||
* Notifications
|
|
||||||
* Versioning
|
|
||||||
* Batches and batch notifications
|
|
||||||
|
|
||||||
Eventually, I'll add a SimpleXMLRPCServer compatible library,
|
|
||||||
and other things to tie the thing off nicely. :)
|
|
||||||
|
|
||||||
For a quick-start, just open a console and type the following,
|
|
||||||
replacing the server address, method, and parameters
|
|
||||||
appropriately.
|
|
||||||
>>> import jsonrpclib
|
|
||||||
>>> server = jsonrpclib.Server('http://localhost:8181')
|
|
||||||
>>> server.add(5, 6)
|
|
||||||
11
|
|
||||||
>>> server._notify.add(5, 6)
|
|
||||||
>>> batch = jsonrpclib.MultiCall(server)
|
|
||||||
>>> batch.add(3, 50)
|
|
||||||
>>> batch.add(2, 3)
|
|
||||||
>>> batch._notify.add(3, 5)
|
|
||||||
>>> batch()
|
|
||||||
[53, 5]
|
|
||||||
|
|
||||||
See http://code.google.com/p/jsonrpclib/ for more info.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import types
|
|
||||||
import sys
|
|
||||||
from xmlrpclib import Transport as XMLTransport
|
|
||||||
from xmlrpclib import SafeTransport as XMLSafeTransport
|
|
||||||
from xmlrpclib import ServerProxy as XMLServerProxy
|
|
||||||
from xmlrpclib import _Method as XML_Method
|
|
||||||
import time
|
|
||||||
import string
|
|
||||||
import random
|
|
||||||
|
|
||||||
# Library includes
|
|
||||||
import jsonrpclib
|
|
||||||
from jsonrpclib import config
|
|
||||||
from jsonrpclib import history
|
|
||||||
|
|
||||||
# JSON library importing
|
|
||||||
cjson = None
|
|
||||||
json = None
|
|
||||||
try:
|
|
||||||
import cjson
|
|
||||||
except ImportError:
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
except ImportError:
|
|
||||||
try:
|
|
||||||
import simplejson as json
|
|
||||||
except ImportError:
|
|
||||||
raise ImportError(
|
|
||||||
'You must have the cjson, json, or simplejson ' +
|
|
||||||
'module(s) available.'
|
|
||||||
)
|
|
||||||
|
|
||||||
IDCHARS = string.ascii_lowercase+string.digits
|
|
||||||
|
|
||||||
class UnixSocketMissing(Exception):
|
|
||||||
"""
|
|
||||||
Just a properly named Exception if Unix Sockets usage is
|
|
||||||
attempted on a platform that doesn't support them (Windows)
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
#JSON Abstractions
|
|
||||||
|
|
||||||
def jdumps(obj, encoding='utf-8'):
|
|
||||||
# Do 'serialize' test at some point for other classes
|
|
||||||
global cjson
|
|
||||||
if cjson:
|
|
||||||
return cjson.encode(obj)
|
|
||||||
else:
|
|
||||||
return json.dumps(obj, encoding=encoding)
|
|
||||||
|
|
||||||
def jloads(json_string):
|
|
||||||
global cjson
|
|
||||||
if cjson:
|
|
||||||
return cjson.decode(json_string)
|
|
||||||
else:
|
|
||||||
return json.loads(json_string)
|
|
||||||
|
|
||||||
|
|
||||||
# XMLRPClib re-implementations
|
|
||||||
|
|
||||||
class ProtocolError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class TransportMixIn(object):
|
|
||||||
""" Just extends the XMLRPC transport where necessary. """
|
|
||||||
user_agent = config.user_agent
|
|
||||||
# for Python 2.7 support
|
|
||||||
_connection = None
|
|
||||||
|
|
||||||
def send_content(self, connection, request_body):
|
|
||||||
connection.putheader("Content-Type", "application/json-rpc")
|
|
||||||
connection.putheader("Content-Length", str(len(request_body)))
|
|
||||||
connection.endheaders()
|
|
||||||
if request_body:
|
|
||||||
connection.send(request_body)
|
|
||||||
|
|
||||||
def getparser(self):
|
|
||||||
target = JSONTarget()
|
|
||||||
return JSONParser(target), target
|
|
||||||
|
|
||||||
class JSONParser(object):
|
|
||||||
def __init__(self, target):
|
|
||||||
self.target = target
|
|
||||||
|
|
||||||
def feed(self, data):
|
|
||||||
self.target.feed(data)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class JSONTarget(object):
|
|
||||||
def __init__(self):
|
|
||||||
self.data = []
|
|
||||||
|
|
||||||
def feed(self, data):
|
|
||||||
self.data.append(data)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
return ''.join(self.data)
|
|
||||||
|
|
||||||
class Transport(TransportMixIn, XMLTransport):
|
|
||||||
def __init__(self):
|
|
||||||
TransportMixIn.__init__(self)
|
|
||||||
XMLTransport.__init__(self)
|
|
||||||
|
|
||||||
class SafeTransport(TransportMixIn, XMLSafeTransport):
|
|
||||||
def __init__(self):
|
|
||||||
TransportMixIn.__init__(self)
|
|
||||||
XMLSafeTransport.__init__(self)
|
|
||||||
|
|
||||||
from httplib import HTTP, HTTPConnection
|
|
||||||
from socket import socket
|
|
||||||
|
|
||||||
USE_UNIX_SOCKETS = False
|
|
||||||
|
|
||||||
try:
|
|
||||||
from socket import AF_UNIX, SOCK_STREAM
|
|
||||||
USE_UNIX_SOCKETS = True
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if (USE_UNIX_SOCKETS):
|
|
||||||
|
|
||||||
class UnixHTTPConnection(HTTPConnection):
|
|
||||||
def connect(self):
|
|
||||||
self.sock = socket(AF_UNIX, SOCK_STREAM)
|
|
||||||
self.sock.connect(self.host)
|
|
||||||
|
|
||||||
class UnixHTTP(HTTP):
|
|
||||||
_connection_class = UnixHTTPConnection
|
|
||||||
|
|
||||||
class UnixTransport(TransportMixIn, XMLTransport):
|
|
||||||
def make_connection(self, host):
|
|
||||||
import httplib
|
|
||||||
host, extra_headers, x509 = self.get_host_info(host)
|
|
||||||
return UnixHTTP(host)
|
|
||||||
|
|
||||||
|
|
||||||
class ServerProxy(XMLServerProxy):
|
|
||||||
"""
|
|
||||||
Unfortunately, much more of this class has to be copied since
|
|
||||||
so much of it does the serialization.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, uri, transport=None, encoding=None,
|
|
||||||
verbose=0, version=None):
|
|
||||||
import urllib
|
|
||||||
if not version:
|
|
||||||
version = config.version
|
|
||||||
self.__version = version
|
|
||||||
schema, uri = urllib.splittype(uri)
|
|
||||||
if schema not in ('http', 'https', 'unix'):
|
|
||||||
raise IOError('Unsupported JSON-RPC protocol.')
|
|
||||||
if schema == 'unix':
|
|
||||||
if not USE_UNIX_SOCKETS:
|
|
||||||
# Don't like the "generic" Exception...
|
|
||||||
raise UnixSocketMissing("Unix sockets not available.")
|
|
||||||
self.__host = uri
|
|
||||||
self.__handler = '/'
|
|
||||||
else:
|
|
||||||
self.__host, self.__handler = urllib.splithost(uri)
|
|
||||||
if not self.__handler:
|
|
||||||
# Not sure if this is in the JSON spec?
|
|
||||||
#self.__handler = '/'
|
|
||||||
self.__handler == '/'
|
|
||||||
if transport is None:
|
|
||||||
if schema == 'unix':
|
|
||||||
transport = UnixTransport()
|
|
||||||
elif schema == 'https':
|
|
||||||
transport = SafeTransport()
|
|
||||||
else:
|
|
||||||
transport = Transport()
|
|
||||||
self.__transport = transport
|
|
||||||
self.__encoding = encoding
|
|
||||||
self.__verbose = verbose
|
|
||||||
|
|
||||||
def _request(self, methodname, params, rpcid=None):
|
|
||||||
request = dumps(params, methodname, encoding=self.__encoding,
|
|
||||||
rpcid=rpcid, version=self.__version)
|
|
||||||
response = self._run_request(request)
|
|
||||||
check_for_errors(response)
|
|
||||||
return response['result']
|
|
||||||
|
|
||||||
def _request_notify(self, methodname, params, rpcid=None):
|
|
||||||
request = dumps(params, methodname, encoding=self.__encoding,
|
|
||||||
rpcid=rpcid, version=self.__version, notify=True)
|
|
||||||
response = self._run_request(request, notify=True)
|
|
||||||
check_for_errors(response)
|
|
||||||
return
|
|
||||||
|
|
||||||
def _run_request(self, request, notify=None):
|
|
||||||
history.add_request(request)
|
|
||||||
|
|
||||||
response = self.__transport.request(
|
|
||||||
self.__host,
|
|
||||||
self.__handler,
|
|
||||||
request,
|
|
||||||
verbose=self.__verbose
|
|
||||||
)
|
|
||||||
|
|
||||||
# Here, the XMLRPC library translates a single list
|
|
||||||
# response to the single value -- should we do the
|
|
||||||
# same, and require a tuple / list to be passed to
|
|
||||||
# the response object, or expect the Server to be
|
|
||||||
# outputting the response appropriately?
|
|
||||||
|
|
||||||
history.add_response(response)
|
|
||||||
if not response:
|
|
||||||
return None
|
|
||||||
return_obj = loads(response)
|
|
||||||
return return_obj
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
# Same as original, just with new _Method reference
|
|
||||||
return _Method(self._request, name)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _notify(self):
|
|
||||||
# Just like __getattr__, but with notify namespace.
|
|
||||||
return _Notify(self._request_notify)
|
|
||||||
|
|
||||||
|
|
||||||
class _Method(XML_Method):
|
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
|
||||||
if len(args) > 0 and len(kwargs) > 0:
|
|
||||||
raise ProtocolError('Cannot use both positional ' +
|
|
||||||
'and keyword arguments (according to JSON-RPC spec.)')
|
|
||||||
if len(args) > 0:
|
|
||||||
return self.__send(self.__name, args)
|
|
||||||
else:
|
|
||||||
return self.__send(self.__name, kwargs)
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
self.__name = '%s.%s' % (self.__name, name)
|
|
||||||
return self
|
|
||||||
# The old method returned a new instance, but this seemed wasteful.
|
|
||||||
# The only thing that changes is the name.
|
|
||||||
#return _Method(self.__send, "%s.%s" % (self.__name, name))
|
|
||||||
|
|
||||||
class _Notify(object):
|
|
||||||
def __init__(self, request):
|
|
||||||
self._request = request
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
return _Method(self._request, name)
|
|
||||||
|
|
||||||
# Batch implementation
|
|
||||||
|
|
||||||
class MultiCallMethod(object):
|
|
||||||
|
|
||||||
def __init__(self, method, notify=False):
|
|
||||||
self.method = method
|
|
||||||
self.params = []
|
|
||||||
self.notify = notify
|
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
|
||||||
if len(kwargs) > 0 and len(args) > 0:
|
|
||||||
raise ProtocolError('JSON-RPC does not support both ' +
|
|
||||||
'positional and keyword arguments.')
|
|
||||||
if len(kwargs) > 0:
|
|
||||||
self.params = kwargs
|
|
||||||
else:
|
|
||||||
self.params = args
|
|
||||||
|
|
||||||
def request(self, encoding=None, rpcid=None):
|
|
||||||
return dumps(self.params, self.method, version=2.0,
|
|
||||||
encoding=encoding, rpcid=rpcid, notify=self.notify)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return '%s' % self.request()
|
|
||||||
|
|
||||||
def __getattr__(self, method):
|
|
||||||
new_method = '%s.%s' % (self.method, method)
|
|
||||||
self.method = new_method
|
|
||||||
return self
|
|
||||||
|
|
||||||
class MultiCallNotify(object):
|
|
||||||
|
|
||||||
def __init__(self, multicall):
|
|
||||||
self.multicall = multicall
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
new_job = MultiCallMethod(name, notify=True)
|
|
||||||
self.multicall._job_list.append(new_job)
|
|
||||||
return new_job
|
|
||||||
|
|
||||||
class MultiCallIterator(object):
|
|
||||||
|
|
||||||
def __init__(self, results):
|
|
||||||
self.results = results
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for i in range(0, len(self.results)):
|
|
||||||
yield self[i]
|
|
||||||
raise StopIteration
|
|
||||||
|
|
||||||
def __getitem__(self, i):
|
|
||||||
item = self.results[i]
|
|
||||||
check_for_errors(item)
|
|
||||||
return item['result']
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
return len(self.results)
|
|
||||||
|
|
||||||
class MultiCall(object):
|
|
||||||
|
|
||||||
def __init__(self, server):
|
|
||||||
self._server = server
|
|
||||||
self._job_list = []
|
|
||||||
|
|
||||||
def _request(self):
|
|
||||||
if len(self._job_list) < 1:
|
|
||||||
# Should we alert? This /is/ pretty obvious.
|
|
||||||
return
|
|
||||||
request_body = '[ %s ]' % ','.join([job.request() for
|
|
||||||
job in self._job_list])
|
|
||||||
responses = self._server._run_request(request_body)
|
|
||||||
del self._job_list[:]
|
|
||||||
if not responses:
|
|
||||||
responses = []
|
|
||||||
return MultiCallIterator(responses)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _notify(self):
|
|
||||||
return MultiCallNotify(self)
|
|
    def __getattr__(self, name):
        new_job = MultiCallMethod(name)
        self._job_list.append(new_job)
        return new_job

    __call__ = _request

# These lines conform to xmlrpclib's "compatibility" line.
# Not really sure if we should include these, but oh well.
Server = ServerProxy


class Fault(object):
    # JSON-RPC error class
    def __init__(self, code=-32000, message='Server error', rpcid=None):
        self.faultCode = code
        self.faultString = message
        self.rpcid = rpcid

    def error(self):
        return {'code':self.faultCode, 'message':self.faultString}

    def response(self, rpcid=None, version=None):
        if not version:
            version = config.version
        if rpcid:
            self.rpcid = rpcid
        return dumps(
            self, methodresponse=True, rpcid=self.rpcid, version=version
        )

    def __repr__(self):
        return '<Fault %s: %s>' % (self.faultCode, self.faultString)


def random_id(length=8):
    return_id = ''
    for i in range(length):
        return_id += random.choice(IDCHARS)
    return return_id


class Payload(dict):
    def __init__(self, rpcid=None, version=None):
        if not version:
            version = config.version
        self.id = rpcid
        self.version = float(version)

    def request(self, method, params=[]):
        if type(method) not in types.StringTypes:
            raise ValueError('Method name must be a string.')
        if not self.id:
            self.id = random_id()
        request = { 'id':self.id, 'method':method }
        if params:
            request['params'] = params
        if self.version >= 2:
            request['jsonrpc'] = str(self.version)
        return request

    def notify(self, method, params=[]):
        request = self.request(method, params)
        if self.version >= 2:
            del request['id']
        else:
            request['id'] = None
        return request

    def response(self, result=None):
        response = {'result':result, 'id':self.id}
        if self.version >= 2:
            response['jsonrpc'] = str(self.version)
        else:
            response['error'] = None
        return response

    def error(self, code=-32000, message='Server error.'):
        error = self.response()
        if self.version >= 2:
            del error['result']
        else:
            error['result'] = None
        error['error'] = {'code':code, 'message':message}
        return error


def dumps(params=[], methodname=None, methodresponse=None,
          encoding=None, rpcid=None, version=None, notify=None):
    """
    This differs from the Python implementation in that it implements
    the rpcid argument since the 2.0 spec requires it for responses.
    """
    if not version:
        version = config.version
    valid_params = (types.TupleType, types.ListType, types.DictType)
    if methodname in types.StringTypes and \
            type(params) not in valid_params and \
            not isinstance(params, Fault):
        """
        If a method, and params are not in a listish or a Fault,
        error out.
        """
        raise TypeError('Params must be a dict, list, tuple or Fault ' +
                        'instance.')
    # Begin parsing object
    payload = Payload(rpcid=rpcid, version=version)
    if not encoding:
        encoding = 'utf-8'
    if type(params) is Fault:
        response = payload.error(params.faultCode, params.faultString)
        return jdumps(response, encoding=encoding)
    if type(methodname) not in types.StringTypes and methodresponse != True:
        raise ValueError('Method name must be a string, or methodresponse '+
                         'must be set to True.')
    if config.use_jsonclass == True:
        from jsonrpclib import jsonclass
        params = jsonclass.dump(params)
    if methodresponse is True:
        if rpcid is None:
            raise ValueError('A method response must have an rpcid.')
        response = payload.response(params)
        return jdumps(response, encoding=encoding)
    request = None
    if notify == True:
        request = payload.notify(methodname, params)
    else:
        request = payload.request(methodname, params)
    return jdumps(request, encoding=encoding)


def loads(data):
    """
    This differs from the Python implementation, in that it returns
    the request structure in Dict format instead of the method, params.
    It will return a list in the case of a batch request / response.
    """
    if data == '':
        # notification
        return None
    result = jloads(data)
    # if the above raises an error, the implementing server code
    # should return something like the following:
    # { 'jsonrpc':'2.0', 'error': fault.error(), id: None }
    if config.use_jsonclass == True:
        from jsonrpclib import jsonclass
        result = jsonclass.load(result)
    return result


def check_for_errors(result):
    if not result:
        # Notification
        return result
    if type(result) is not types.DictType:
        raise TypeError('Response is not a dict.')
    if 'jsonrpc' in result.keys() and float(result['jsonrpc']) > 2.0:
        raise NotImplementedError('JSON-RPC version not yet supported.')
    if 'result' not in result.keys() and 'error' not in result.keys():
        raise ValueError('Response does not have a result or error key.')
    if 'error' in result.keys() and result['error'] != None:
        code = result['error']['code']
        message = result['error']['message']
        raise ProtocolError((code, message))
    return result


def isbatch(result):
    if type(result) not in (types.ListType, types.TupleType):
        return False
    if len(result) < 1:
        return False
    if type(result[0]) is not types.DictType:
        return False
    if 'jsonrpc' not in result[0].keys():
        return False
    try:
        version = float(result[0]['jsonrpc'])
    except ValueError:
        raise ProtocolError('"jsonrpc" key must be a float(able) value.')
    if version < 2:
        return False
    return True


def isnotification(request):
    if 'id' not in request.keys():
        # 2.0 notification
        return True
    if request['id'] == None:
        # 1.0 notification
        return True
    return False
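For orientation, a minimal sketch of the request/response round trip these helpers support; the method name, id and values are made up, and the key order of the emitted JSON may differ:

    # Build a JSON-RPC 2.0 request string (hypothetical method and params).
    req = dumps(params=[3, 4], methodname='add', rpcid='abc12345', version=2.0)
    # req is a JSON string along the lines of:
    # {"jsonrpc": "2.0", "method": "add", "params": [3, 4], "id": "abc12345"}

    # Parse a response string back into a dict and surface any error as ProtocolError.
    rsp = loads('{"jsonrpc": "2.0", "result": 7, "id": "abc12345"}')
    check_for_errors(rsp)
    print(rsp['result'])  # 7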
@@ -55,7 +55,7 @@ Some of our innovative features;
 </table>

 ## Required software
-* Python 2.6+
+* Python 2.7.9+ (earlier versions must be used with a current version of openssl)
 * Cheetah 2.1.0+

 ## Howto

@@ -90,7 +90,6 @@ Finally, a massive thanks to all those that remain in the shadows, the quiet one
 * Support
 * Please note that, aside from bug reports, we do *not* offer support. We can offer some help, but we really need you to understand the basics of your Linux or Windows OS. If you do not understand basics such as locating a database file, not running as root, setting up file permissions, or claiming a user derp, then we really cannot help you!
 * IRC: `irc.freenode.net` channel `#SickGear`
-* Webchat IRC: [webchat link](http://webchat.freenode.net/?channels=SickGear)

 ## Screenies
 <table><thead></thead><tbody>
@@ -32,21 +32,16 @@ import os.path
 import uuid
 import base64
 sys.path.insert(1, os.path.abspath('../lib'))
-from sickbeard import providers, metadata, config, webserveInit, searchBacklog, showUpdater, versionChecker, \
-    autoPostProcesser, subtitles, traktChecker, helpers, db, exceptions, show_queue, search_queue, scheduler, \
-    show_name_helpers, logger, naming, searchRecent, searchProper, scene_numbering, scene_exceptions, name_cache
-from sickbeard.providers.generic import GenericProvider
-from providers import btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, grabtheinfo, scenetime, pretome, \
-    omgwtfnzbs, scc, torrentday, hdbits, speedcd, nyaatorrents, torrentbytes, beyondhd, gftracker, transmithe_net, \
-    bitsoup, tokyotoshokan, animenzb, rarbg, morethan, alpharatio, pisexy, torrentshack, torrenting, funfile
-from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
-    naming_ep_type, minimax
+from sickbeard import helpers, logger, db, naming, metadata, providers, scene_exceptions, scene_numbering, \
+    scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \
+    show_queue, show_updater, subtitles, traktChecker, version_checker
+from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax
+from sickbeard.common import SD, SKIPPED
+from sickbeard.databases import mainDB, cache_db, failed_db
 from indexers.indexer_api import indexerApi
 from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, \
     indexer_episodenotfound, indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts
-from sickbeard.common import SD, SKIPPED, NAMING_REPEAT
-from sickbeard.databases import mainDB, cache_db, failed_db
+from sickbeard.providers.generic import GenericProvider

 from lib.configobj import ConfigObj

 PID = None

@@ -1020,7 +1015,7 @@ def initialize(consoleLogging=True):
            if hasattr(torrent_prov, 'options'):
                torrent_prov.options = check_setting_str(CFG, prov_id_uc, prov_id + '_options', '')
            if hasattr(torrent_prov, '_seed_ratio'):
-               torrent_prov._seed_ratio = check_setting_str(CFG, prov_id_uc, prov_id + '_seed_ratio', '')
+               torrent_prov._seed_ratio = check_setting_str(CFG, prov_id_uc, prov_id + '_seed_ratio', '').replace('None', '')
            if hasattr(torrent_prov, 'seed_time'):
                torrent_prov.seed_time = check_setting_int(CFG, prov_id_uc, prov_id + '_seed_time', '')
            if hasattr(torrent_prov, 'minseed'):

@@ -1029,6 +1024,8 @@ def initialize(consoleLogging=True):
                torrent_prov.minleech = check_setting_int(CFG, prov_id_uc, prov_id + '_minleech', 0)
            if hasattr(torrent_prov, 'freeleech'):
                torrent_prov.freeleech = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_freeleech', 0))
+           if hasattr(torrent_prov, 'reject_m2ts'):
+               torrent_prov.reject_m2ts = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_reject_m2ts', 0))
            if hasattr(torrent_prov, 'enable_recentsearch'):
                torrent_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
                                                                          prov_id + '_enable_recentsearch', 1))

@@ -1105,7 +1102,7 @@ def initialize(consoleLogging=True):
        # initialize schedulers
        # updaters
        update_now = datetime.timedelta(minutes=0)
-       versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(),
+       versionCheckScheduler = scheduler.Scheduler(version_checker.CheckVersion(),
                                                    cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY),
                                                    threadName='CHECKVERSION',
                                                    silent=False)

@@ -1114,7 +1111,7 @@ def initialize(consoleLogging=True):
                                                 cycleTime=datetime.timedelta(seconds=3),
                                                 threadName='SHOWQUEUE')

-       showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
+       showUpdateScheduler = scheduler.Scheduler(show_updater.ShowUpdater(),
                                                  cycleTime=datetime.timedelta(hours=1),
                                                  threadName='SHOWUPDATER',
                                                  start_time=datetime.time(hour=SHOW_UPDATE_HOUR),

@@ -1126,19 +1123,19 @@ def initialize(consoleLogging=True):
                                                   threadName='SEARCHQUEUE')

        update_interval = datetime.timedelta(minutes=RECENTSEARCH_FREQUENCY)
-       recentSearchScheduler = scheduler.Scheduler(searchRecent.RecentSearcher(),
+       recentSearchScheduler = scheduler.Scheduler(search_recent.RecentSearcher(),
                                                    cycleTime=update_interval,
                                                    threadName='RECENTSEARCHER',
                                                    run_delay=update_now if RECENTSEARCH_STARTUP
                                                    else datetime.timedelta(minutes=5),
                                                    prevent_cycle_run=searchQueueScheduler.action.is_recentsearch_in_progress)

-       backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
+       backlogSearchScheduler = search_backlog.BacklogSearchScheduler(search_backlog.BacklogSearcher(),
                                                    cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
                                                    threadName='BACKLOG',
                                                    run_delay=update_now if BACKLOG_STARTUP
                                                    else datetime.timedelta(minutes=10),
                                                    prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress)

        search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60}
        if CHECK_PROPERS_INTERVAL in search_intervals:

@@ -1148,7 +1145,7 @@ def initialize(consoleLogging=True):
            update_interval = datetime.timedelta(hours=1)
            run_at = datetime.time(hour=1) # 1 AM

-       properFinderScheduler = scheduler.Scheduler(searchProper.ProperSearcher(),
+       properFinderScheduler = scheduler.Scheduler(search_propers.ProperSearcher(),
                                                    cycleTime=update_interval,
                                                    threadName='FINDPROPERS',
                                                    start_time=run_at,

@@ -1156,7 +1153,7 @@ def initialize(consoleLogging=True):
                                                    prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress)

        # processors
-       autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
+       autoPostProcesserScheduler = scheduler.Scheduler(auto_post_processer.PostProcesser(),
                                                         cycleTime=datetime.timedelta(
                                                             minutes=AUTOPOSTPROCESSER_FREQUENCY),
                                                         threadName='POSTPROCESSER',

@@ -1335,12 +1332,12 @@ def halt():


 def sig_handler(signum=None, frame=None):
-    if type(signum) != type(None):
+    if isinstance(signum, type(None)):
        logger.log(u'Signal %i caught, saving and exiting...' % int(signum))
        events.put(events.SystemEvent.SHUTDOWN)


-def saveAll():
+def save_all():
    global showList

    # write all shows

@@ -1356,7 +1353,7 @@ def saveAll():
 def restart(soft=True):
    if soft:
        halt()
-       saveAll()
+       save_all()
        logger.log(u'Re-initializing all data')
        initialize()
    else:

@@ -1515,6 +1512,8 @@ def save_config():
            new_config[prov_id_uc][prov_id + '_minleech'] = int(torrent_prov.minleech)
        if hasattr(torrent_prov, 'freeleech'):
            new_config[prov_id_uc][prov_id + '_freeleech'] = int(torrent_prov.freeleech)
+       if hasattr(torrent_prov, 'reject_m2ts'):
+           new_config[prov_id_uc][prov_id + '_reject_m2ts'] = int(torrent_prov.reject_m2ts)
        if hasattr(torrent_prov, 'enable_recentsearch'):
            new_config[prov_id_uc][prov_id + '_enable_recentsearch'] = int(torrent_prov.enable_recentsearch)
        if hasattr(torrent_prov, 'enable_backlog'):

@@ -1799,7 +1798,7 @@ def save_config():
    new_config.write()


-def launchBrowser(start_port=None):
+def launch_browser(start_port=None):
    if not start_port:
        start_port = WEB_PORT
    if ENABLE_HTTPS:
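The provider settings above follow a hasattr-gated pattern: each optional attribute is only read from, and later written back to, the config when the provider class actually defines it. A minimal sketch of that round trip using made-up stand-ins rather than the real ConfigObj/CFG objects and check_setting_* helpers:

    # Hypothetical stand-ins; the real code uses ConfigObj sections and check_setting_int/str.
    class FakeProvider(object):
        freeleech = False
        reject_m2ts = False

    cfg = {'FAKEPROV': {'fakeprov_freeleech': 1}}  # reject_m2ts absent -> falls back to 0

    def check_setting_int_stub(cfg, section, key, default):
        try:
            return int(cfg[section].get(key, default))
        except (KeyError, TypeError, ValueError):
            return default

    prov = FakeProvider()
    if hasattr(prov, 'freeleech'):
        prov.freeleech = bool(check_setting_int_stub(cfg, 'FAKEPROV', 'fakeprov_freeleech', 0))
    if hasattr(prov, 'reject_m2ts'):
        prov.reject_m2ts = bool(check_setting_int_stub(cfg, 'FAKEPROV', 'fakeprov_reject_m2ts', 0))

    # Saving mirrors the load: only attributes the provider actually has are written back.
    new_config = {'FAKEPROV': {}}
    if hasattr(prov, 'reject_m2ts'):
        new_config['FAKEPROV']['fakeprov_reject_m2ts'] = int(prov.reject_m2ts)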
@@ -45,4 +45,4 @@ class PostProcesser():

        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)

        self.amActive = False
@@ -1,4 +1,5 @@
 from bs4 import BeautifulSoup
+import re


 class BS4Parser:

@@ -12,6 +13,11 @@ class BS4Parser:
            kwargs_new[k] = v

+       tag, attr = [x in kwargs_new and kwargs_new.pop(x) or y for (x, y) in [('tag', 'table'), ('attr', '')]]
+       if attr:
+           args = (re.sub(r'(?is).*(<%(tag)s[^>]+%(attr)s[^>]*>.*</%(tag)s>).*' % {'tag': tag, 'attr': attr},
+                          r'<html><head></head><body>\1</body></html>', args[0]).strip(),) + args[1:]
+
        self.soup = BeautifulSoup(*args, **kwargs_new)

    def __enter__(self):
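The new tag/attr keywords let a caller pre-trim a page down to just the table of interest before BeautifulSoup parses it. A minimal sketch of the intended call pattern (the HTML is invented, and the context-manager is assumed to yield the parsed soup as it does elsewhere in the providers):

    html = '<html><body><div>noise</div><table id="torrent_table"><tr><td>row</td></tr></table></body></html>'

    # Only the <table ... id="torrent_table" ...> element survives the regex pre-filter.
    with BS4Parser(html, 'html.parser', attr='id="torrent_table"') as soup:
        rows = soup.find_all('tr')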
@@ -16,12 +16,9 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-from base64 import b64encode
-
 import sickbeard
 from sickbeard.clients.generic import GenericClient
 from lib.rtorrent import RTorrent
-from lib.rtorrent.err import MethodError


 class rTorrentAPI(GenericClient):

@@ -45,7 +42,6 @@ class rTorrentAPI(GenericClient):
        return self.auth

    def _add_torrent_uri(self, result):
-       filedata = None

        if not self.auth:
            return False

@@ -62,7 +58,7 @@ class rTorrentAPI(GenericClient):

            # Set label
            if sickbeard.TORRENT_LABEL:
-               torrent.set_custom(1, sickbeard.TORRENT_LABEL.lower())
+               torrent.set_custom(1, sickbeard.TORRENT_LABEL)

            if sickbeard.TORRENT_PATH:
                torrent.set_directory(sickbeard.TORRENT_PATH)

@@ -76,7 +72,6 @@ class rTorrentAPI(GenericClient):
            return False

    def _add_torrent_file(self, result):
-       filedata = None

        if not self.auth:
            return False

@@ -84,7 +79,7 @@ class rTorrentAPI(GenericClient):
        if not result:
            return False

-       # group_name = 'sb_test'.lower() ##### Use provider instead of _test
+       # group_name = 'sb_test' ##### Use provider instead of _test
        # if not self._set_torrent_ratio(group_name):
        # return False

@@ -98,7 +93,7 @@ class rTorrentAPI(GenericClient):

            # Set label
            if sickbeard.TORRENT_LABEL:
-               torrent.set_custom(1, sickbeard.TORRENT_LABEL.lower())
+               torrent.set_custom(1, sickbeard.TORRENT_LABEL)

            if sickbeard.TORRENT_PATH:
                torrent.set_directory(sickbeard.TORRENT_PATH)
@@ -91,7 +91,7 @@ class Quality:
    RAWHDTV = 1 << 3  # 8 -- 720p/1080i mpeg2 (trollhd releases)
    FULLHDTV = 1 << 4  # 16 -- 1080p HDTV (QCF releases)
    HDWEBDL = 1 << 5  # 32
    FULLHDWEBDL = 1 << 6  # 64 -- 1080p web-dl
    HDBLURAY = 1 << 7  # 128
    FULLHDBLURAY = 1 << 8  # 256

@@ -171,7 +171,7 @@ class Quality:
    @staticmethod
    def sceneQuality(name, anime=False):
        """
        Return The quality from the scene episode File
        """

        name = os.path.basename(name)

@@ -205,15 +205,15 @@ class Quality:
        else:
            return Quality.UNKNOWN

-       if checkName(['(pdtv|hdtv|dsr|tvrip)([-]|.((aac|ac3|dd).?\d\.?\d.)*(xvid|x264|h.?264))'], all) and not checkName(['(720|1080)[pi]'], all) \
+       if checkName(['(pdtv|hdtv|dsr|tvrip)([-]|.((aac|ac3|dd).?\d\.?\d.)*(xvid|x264|h.?264))'], all) and not checkName(['(720|1080|2160)[pi]'], all) \
                and not checkName(['hr.ws.pdtv.(x264|h.?264)'], any):
            return Quality.SDTV
-       elif checkName(['web.?dl|web.?rip', 'xvid|x264|h.?264'], all) and not checkName(['(720|1080)[pi]'], all):
+       elif checkName(['web.?dl|web.?rip', 'xvid|x264|h.?264'], all) and not checkName(['(720|1080|2160)[pi]'], all):
            return Quality.SDTV
-       elif checkName(['(dvd.?rip|b[r|d]rip)(.ws)?(.(xvid|divx|x264|h.?264))?'], any) and not checkName(['(720|1080)[pi]'], all):
+       elif checkName(['(dvd.?rip|b[r|d]rip)(.ws)?(.(xvid|divx|x264|h.?264))?'], any) and not checkName(['(720|1080|2160)[pi]'], all):
            return Quality.SDDVD
        elif checkName(['720p', 'hdtv', 'x264|h.?264'], all) or checkName(['hr.ws.pdtv.(x264|h.?264)'], any) \
-               and not checkName(['(1080)[pi]'], all):
+               and not checkName(['(1080|2160)[pi]'], all):
            return Quality.HDTV
        elif checkName(['720p|1080i', 'hdtv', 'mpeg-?2'], all) or checkName(['1080[pi].hdtv', 'h.?264'], all):
            return Quality.RAWHDTV

@@ -337,7 +337,7 @@ ANY = Quality.combineQualities(
    [Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
     Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], [])  # SD + HD

 # legacy template, cant remove due to reference in mainDB upgrade?
 BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])

 qualityPresets = (SD, HD, HD720p, HD1080p, ANY)

@@ -401,4 +401,4 @@ class Overview:

 countryList = {'Australia': 'AU',
                'Canada': 'CA',
                'USA': 'US'}
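The 2160 additions above widen the "not SD/HD" guards so that 4K names no longer fall through into the SDTV/SDDVD/HDTV buckets. A tiny standalone sketch of that guard; check_name here is a simplified stand-in for the nested helper sceneQuality uses, and the release name is invented:

    import re

    def check_name(patterns, func, name):
        # simplified stand-in: apply each pattern case-insensitively and combine with all()/any()
        return func(bool(re.search(p, name, re.I)) for p in patterns)

    name = 'Show.Name.S01E01.2160p.HDTV.x264-GRP'
    print(check_name(['(720|1080)[pi]'], all, name))       # False - the old guard missed 2160p
    print(check_name(['(720|1080|2160)[pi]'], all, name))   # True  - the new guard catches it, so SD/HD buckets are skipped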
@@ -22,8 +22,9 @@ import re
 import urlparse

 import sickbeard
+import sickbeard.providers
 from sickbeard import encodingKludge as ek
-from sickbeard import helpers, logger, naming, db, providers
+from sickbeard import helpers, logger, naming, db


 naming_ep_type = ('%(seasonnumber)dx%(episodenumber)02d',
@@ -94,18 +94,17 @@ def remove_extension(name):
    return name


-def remove_non_release_groups(name, anime=False):
+def remove_non_release_groups(name, is_anime=False):
    """
    Remove non release groups from name
    """

    if name:
        rc = [re.compile(r'(?i)' + v) for v in [
            '([\s\.\-_\[\{\(]*(no-rar|nzbgeek|ripsalot|rp|siklopentan)[\s\.\-_\]\}\)]*)$',
            '(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*(www\.\w+.\w+)[\s\.\-_]*[\]\}\)][\s\.\-_]*)$',
            '(?<=\w)([\s\.\-_]*[\[\{\(]\s*(rar(bg|tv)|((e[tz]|v)tv))[\s\.\-_]*[\]\}\)][\s\.\-_]*)$'] +
-           (['(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)$'], [])[anime]
-           ]
+           (['(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)$'], [])[is_anime]]
        rename = name
        while rename:
            for regex in rc:
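As an illustration of the tail-stripping patterns above (the release names are invented, and the outputs describe what the regexes should produce rather than verified results):

    # The rar(bg|tv)/e[tz]tv/vtv pattern strips a trailing re-tag such as [rartv]:
    remove_non_release_groups('Show.Name.S01E01.720p.HDTV.x264-GRP[rartv]')
    # -> 'Show.Name.S01E01.720p.HDTV.x264-GRP'

    # With is_anime=True an extra pattern also removes a generic trailing bracketed tag:
    remove_non_release_groups('Anime.Title.01.[ABC123]', is_anime=True)
    # -> approximately 'Anime.Title.01'; stripping repeats until nothing more matches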
@@ -1165,7 +1164,7 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
        logger.log(u'HTTP error %s while loading URL %s' % (e.errno, url), logger.WARNING)
        return
    except requests.exceptions.ConnectionError as e:
-       logger.log(u'Connection error msg:%s while loading URL %s' % (str(e.message), url), logger.WARNING)
+       logger.log(u'Internet connection error msg:%s while loading URL %s' % (str(e.message), url), logger.WARNING)
        return
    except requests.exceptions.ReadTimeout as e:
        logger.log(u'Read timed out msg:%s while loading URL %s' % (str(e.message), url), logger.WARNING)

@@ -1173,8 +1172,11 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
    except (requests.exceptions.Timeout, socket.timeout) as e:
        logger.log(u'Connection timed out msg:%s while loading URL %s' % (str(e.message), url), logger.WARNING)
        return
-   except Exception:
-       logger.log(u'Exception caught while loading URL %s Detail... %s' % (url, traceback.format_exc()), logger.WARNING)
+   except Exception as e:
+       if e.message:
+           logger.log(u'Exception caught while loading URL %s\r\nDetail... %s\r\n%s' % (url, str(e.message), traceback.format_exc()), logger.WARNING)
+       else:
+           logger.log(u'Unknown exception while loading URL %s\r\nDetail... %s' % (url, traceback.format_exc()), logger.WARNING)
        return

    if json:

@@ -1410,6 +1412,7 @@ def clear_unused_providers():
        myDB = db.DBConnection('cache.db')
        myDB.action('DELETE FROM provider_cache WHERE provider NOT IN (%s)' % ','.join(['?'] * len(providers)), providers)


 def make_search_segment_html_string(segment, max_eps=5):
    seg_str = ''
    if segment and not isinstance(segment, list):

@@ -1427,3 +1430,7 @@ def make_search_segment_html_string(segment, max_eps=5):
        episodes = ['S' + str(x.season).zfill(2) + 'E' + str(x.episode).zfill(2) for x in segment]
        seg_str = u'Episode' + maybe_plural(len(episodes)) + ': ' + ', '.join(episodes)
    return seg_str
+
+
+def has_anime():
+    return False if not sickbeard.showList else any(filter(lambda show: show.is_anime, sickbeard.showList))
@@ -32,22 +32,30 @@ from sickbeard import history

 from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality

-from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
+from name_parser.parser import NameParser


-def searchPropers():
+def search_propers():

    if not sickbeard.DOWNLOAD_PROPERS:
        return

-   logger.log(u'Beginning the search for new propers')
+   logger.log(u'Beginning search for new propers')

-   propers = _getProperList()
+   age_shows, age_anime = 2, 14
+   aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
+   aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)
+   recent_shows, recent_anime = _recent_history(aired_since_shows, aired_since_anime)
+   if recent_shows or recent_anime:
+       propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime)

-   if propers:
-       _downloadPropers(propers)
+       if propers:
+           _download_propers(propers)
+   else:
+       logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' %
+                  (age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()]))

-   _set_lastProperSearch(datetime.datetime.today().toordinal())
+   _set_last_proper_search(datetime.datetime.today().toordinal())

    run_at = ''
    if None is sickbeard.properFinderScheduler.start_time:

@@ -59,226 +67,242 @@ def searchPropers():

    logger.log(u'Completed the search for new propers%s' % run_at)


-def _getProperList():
+def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    propers = {}

-   search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of the
-   origThreadName = threading.currentThread().name
+   orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
-   for curProvider in providers:
-       threading.currentThread().name = origThreadName + ' :: [' + curProvider.name + ']'
+   for cur_provider in providers:
+       if not recent_anime and cur_provider.anime_only:
+           continue
+       threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

-       logger.log(u'Searching for any new PROPER releases from ' + curProvider.name)
+       logger.log(u'Searching for new PROPER releases')

        try:
-           curPropers = curProvider.find_propers(search_date)
+           found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows, anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
-           logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR)
+           logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
-           threading.currentThread().name = origThreadName
+           threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the proper to the list
-       for x in curPropers:
-           name = _genericName(x.name)
-           if not name in propers:
-               logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
-               x.provider = curProvider
-               propers[name] = x
+       count = 0
+       np = NameParser(False, try_scene_exceptions=True)
+       for x in found_propers:
+           name = _generic_name(x.name)
+           if name not in propers:
+               try:
+                   parse_result = np.parse(x.title)
+                   if parse_result.series_name and parse_result.episode_numbers and \
+                           parse_result.show.indexerid in recent_shows + recent_anime:
+                       logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
+                       x.show = parse_result.show.indexerid
+                       x.provider = cur_provider
+                       propers[name] = x
+                       count += 1
+               except Exception:
+                   continue

+       cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by
-   sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
-   finalPropers = []
+   sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
+   verified_propers = []

-   for curProper in sortedPropers:
+   for cur_proper in sorted_propers:

-       try:
-           myParser = NameParser(False)
-           parse_result = myParser.parse(curProper.name)
-       except InvalidNameException:
-           logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid episode', logger.DEBUG)
-           continue
-       except InvalidShowException:
-           logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid show', logger.DEBUG)
-           continue

-       if not parse_result.series_name:
-           continue

-       if not parse_result.episode_numbers:
-           logger.log(
-               u'Ignoring ' + curProper.name + ' because it\'s for a full season rather than specific episode',
-               logger.DEBUG)
-           continue

-       logger.log(
-           u'Successful match! Result ' + parse_result.original_name + ' matched to show ' + parse_result.show.name,
-           logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
-       curProper.indexerid = parse_result.show.indexerid
+       cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
-       curProper.indexer = parse_result.show.indexer
+       cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
-       curProper.season = parse_result.season_number if parse_result.season_number != None else 1
-       curProper.episode = parse_result.episode_numbers[0]
-       curProper.release_group = parse_result.release_group
-       curProper.version = parse_result.version
-       curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
+       cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
+       cur_proper.episode = parse_result.episode_numbers[0]
+       cur_proper.release_group = parse_result.release_group
+       cur_proper.version = parse_result.version
+       cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
-           if not curProper.release_group and curProper.version == -1:
-               logger.log(u'Proper ' + curProper.name + ' doesn\'t have a release group and version, ignoring it',
+           if not cur_proper.release_group and -1 == cur_proper.version:
+               logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

-       if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
-           logger.log(u'Proper ' + curProper.name + ' isn\'t a valid scene release that we want, ignoring it',
+       if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False):
+           logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

-       if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name,
-                                                                            parse_result.show.rls_ignore_words):
-           logger.log(
-               u'Ignoring ' + curProper.name + ' based on ignored words filter: ' + parse_result.show.rls_ignore_words,
-               logger.MESSAGE)
+       re_extras = dict(re_prefix='.*', re_suffix='.*')
+       result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
+       if None is not result and result:
+           logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

-       if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name,
-                                                                                  parse_result.show.rls_require_words):
-           logger.log(
-               u'Ignoring ' + curProper.name + ' based on required words filter: ' + parse_result.show.rls_require_words,
-               logger.MESSAGE)
+       result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
+       if None is not result and not result:
+           logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
-       myDB = db.DBConnection()
-       sqlResults = myDB.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
-                                [curProper.indexerid, curProper.season, curProper.episode])
-       if not sqlResults:
+       my_db = db.DBConnection()
+       sql_results = my_db.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                  [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
+       if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
-       oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]['status']))
-       if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
+       old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
+       if old_status not in (DOWNLOADED, SNATCHED) or cur_proper.quality != old_quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
-           myDB = db.DBConnection()
-           sqlResults = myDB.select(
+           my_db = db.DBConnection()
+           sql_results = my_db.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
-               [curProper.indexerid, curProper.season, curProper.episode])
+               [cur_proper.indexerid, cur_proper.season, cur_proper.episode])

-           oldVersion = int(sqlResults[0]['version'])
-           oldRelease_group = (sqlResults[0]['release_group'])
+           old_version = int(sql_results[0]['version'])
+           old_release_group = (sql_results[0]['release_group'])

-           if oldVersion > -1 and oldVersion < curProper.version:
-               logger.log('Found new anime v' + str(curProper.version) + ' to replace existing v' + str(oldVersion))
+           if -1 < old_version < cur_proper.version:
+               logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue

-           if oldRelease_group != curProper.release_group:
-               logger.log('Skipping proper from release group: ' + curProper.release_group + ', does not match existing release group: ' + oldRelease_group)
+           if cur_proper.release_group != old_release_group:
+               logger.log(u'Skipping proper from release group: %s, does not match existing release group: %s' %
+                          (cur_proper.release_group, old_release_group))
                continue

-       # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
-       if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
-               operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
-           logger.log(u'Found a proper that we need: ' + str(curProper.name))
-           finalPropers.append(curProper)
+       # if the show is in our list and there hasn't been a proper already added for that particular episode
+       # then add it to our list of propers
+       if cur_proper.indexerid != -1 and (cur_proper.indexerid, cur_proper.season, cur_proper.episode) not in map(
+               operator.attrgetter('indexerid', 'season', 'episode'), verified_propers):
+           logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
+           verified_propers.append(cur_proper)

-   return finalPropers
+   return verified_propers


-def _downloadPropers(properList):
-
-   for curProper in properList:
-
-       historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)
+def _download_propers(proper_list):

+   for cur_proper in proper_list:

+       history_limit = datetime.datetime.today() - datetime.timedelta(days=30)

        # make sure the episode has been downloaded before
-       myDB = db.DBConnection()
-       historyResults = myDB.select(
-           'SELECT resource FROM history '
-           'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? '
+       my_db = db.DBConnection()
+       history_results = my_db.select(
+           'SELECT resource FROM history ' +
+           'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? ' +
            'AND action IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')',
-           [curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
-            historyLimit.strftime(history.dateFormat)])
+           [cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
+            history_limit.strftime(history.dateFormat)])

-       # if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
-       if len(historyResults) == 0:
-           logger.log(
-               u'Unable to find an original history entry for proper ' + curProper.name + ' so I\'m not downloading it.')
+       # if we didn't download this episode in the first place we don't know what quality to use for the proper = skip
+       if 0 == len(history_results):
+           logger.log(u'Skipping download because cannot find an original history entry for proper ' + cur_proper.name)
            continue

        else:

            # get the show object
-           showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
-           if showObj == None:
+           show_obj = helpers.findCertainShow(sickbeard.showList, cur_proper.indexerid)
+           if None is show_obj:
                logger.log(u'Unable to find the show with indexerid ' + str(
-                   curProper.indexerid) + ' so unable to download the proper', logger.ERROR)
+                   cur_proper.indexerid) + ' so unable to download the proper', logger.ERROR)
                continue

            # make sure that none of the existing history downloads are the same proper we're trying to download
-           clean_proper_name = _genericName(helpers.remove_non_release_groups(curProper.name, showObj.anime))
-           isSame = False
-           for curResult in historyResults:
+           clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime()))
+           is_same = False
+           for result in history_results:
                # if the result exists in history already we need to skip it
-               if _genericName(helpers.remove_non_release_groups(curResult['resource'])) == clean_proper_name:
-                   isSame = True
+               if clean_proper_name == _generic_name(helpers.remove_non_release_groups(result['resource'])):
+                   is_same = True
                    break
-           if isSame:
+           if is_same:
                logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
                continue

+           ep_obj = show_obj.getEpisode(cur_proper.season, cur_proper.episode)

-           epObj = showObj.getEpisode(curProper.season, curProper.episode)

            # make the result object
-           result = curProper.provider.get_result([epObj], curProper.url)
+           result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
            if None is result:
                continue
-           result.name = curProper.name
-           result.quality = curProper.quality
-           result.version = curProper.version
+           result.name = cur_proper.name
+           result.quality = cur_proper.quality
+           result.version = cur_proper.version

            # snatch it
-           search.snatchEpisode(result, SNATCHED_PROPER)
+           search.snatch_episode(result, SNATCHED_PROPER)


-def _genericName(name):
+def _recent_history(aired_since_shows, aired_since_anime):
+
+   recent_shows, recent_anime = [], []
+
+   aired_since_shows = aired_since_shows.toordinal()
+   aired_since_anime = aired_since_anime.toordinal()
+
+   my_db = db.DBConnection()
+   sql_results = my_db.select(
+       'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+       ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+       ' WHERE e.airdate >= %s' % min(aired_since_shows, aired_since_anime) +
+       ' AND (e.status IN (%s))' % ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED])
+   )
+
+   for sqlshow in sql_results:
+       show = helpers.findCertainShow(sickbeard.showList, sqlshow['showid'])
+       if show:
+           if sqlshow['airdate'] >= aired_since_shows and not show.is_anime:
+               sqlshow['showid'] not in recent_shows and recent_shows.append(sqlshow['showid'])
+           else:
+               sqlshow['showid'] not in recent_anime and show.is_anime and recent_anime.append(sqlshow['showid'])
+
+   return recent_shows, recent_anime
+
+
+def _generic_name(name):
    return name.replace('.', ' ').replace('-', ' ').replace('_', ' ').lower()


-def _set_lastProperSearch(when):
-
-   logger.log(u'Setting the last Proper search in the DB to ' + str(when), logger.DEBUG)
+def _set_last_proper_search(when):

-   myDB = db.DBConnection()
-   sqlResults = myDB.select('SELECT * FROM info')
+   logger.log(u'Setting the last Proper search in the DB to %s' % when, logger.DEBUG)

-   if len(sqlResults) == 0:
-       myDB.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
-                   [0, 0, str(when)])
+   my_db = db.DBConnection()
+   sql_results = my_db.select('SELECT * FROM info')
+
+   if 0 == len(sql_results):
+       my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
+                    [0, 0, str(when)])
    else:
-       myDB.action('UPDATE info SET last_proper_search=' + str(when))
+       my_db.action('UPDATE info SET last_proper_search=%s' % when)


-def _get_lastProperSearch():
-
-   myDB = db.DBConnection()
-   sqlResults = myDB.select('SELECT * FROM info')
+def _get_last_proper_search():
+
+   my_db = db.DBConnection()
+   sql_results = my_db.select('SELECT * FROM info')

    try:
-       last_proper_search = datetime.date.fromordinal(int(sqlResults[0]['last_proper_search']))
+       last_proper_search = datetime.date.fromordinal(int(sql_results[0]['last_proper_search']))
    except:
        return datetime.date.fromordinal(1)
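The reworked proper search only queries providers when something was actually downloaded or snatched recently: a 2-day window for regular shows and a 14-day window for anime. A small sketch of that windowing logic in isolation (dates and ids are illustrative; the real code reads the rows from tv_episodes):

    import datetime

    age_shows, age_anime = 2, 14
    aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
    aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)

    # e.g. one episode row from the database (made-up values)
    episode = {'showid': 101, 'airdate': (datetime.date.today() - datetime.timedelta(days=5)).toordinal()}
    is_anime_show = True

    recent_shows, recent_anime = [], []
    if episode['airdate'] >= aired_since_shows.toordinal() and not is_anime_show:
        recent_shows.append(episode['showid'])
    elif is_anime_show and episode['airdate'] >= aired_since_anime.toordinal():
        recent_anime.append(episode['showid'])

    # Only when either list is non-empty does search_propers() go on to call _get_proper_list().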
@@ -16,42 +16,56 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-__all__ = ['womble',
-           'btn',
-           'thepiratebay',
-           'kat',
-           'torrentleech',
-           'scc',
-           'torrentday',
-           'hdbits',
-           'iptorrents',
-           'omgwtfnzbs',
-           'speedcd',
-           'nyaatorrents',
-           'torrentbytes',
-           'bitsoup',
-           'tokyotoshokan',
-           'animenzb',
-           'rarbg',
-           'morethan',
-           'alpharatio',
-           'pisexy',
-           'torrentshack',
-           'beyondhd',
-           'gftracker',
-           'transmithe_net',
-           'grabtheinfo',
-           'scenetime',
-           'pretome',
-           'torrenting',
-           'funfile',
-           ]

 from os import sys

 import sickbeard
-import generic
+from . import generic
 from sickbeard import logger
+# usenet
+from . import newznab, omgwtfnzbs, womble
+# torrent
+from . import alpharatio, beyondhd, bitmetv, bitsoup, btn, freshontv, funfile, gftracker, grabtheinfo, \
+    hdbits, hdspace, iptorrents, kat, morethan, pisexy, pretome, rarbg, scc, scenetime, shazbat, speedcd, strike, \
+    thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, torrentshack, transmithe_net, tvchaosuk
+# anime
+from . import nyaatorrents, tokyotoshokan

+__all__ = ['omgwtfnzbs',
+           'womble',
+           'alpharatio',
+           'beyondhd',
+           'bitmetv',
+           'bitsoup',
+           'btn',
+           'freshontv',
+           'funfile',
+           'gftracker',
+           'grabtheinfo',
+           'hdbits',
+           'hdspace',
+           'iptorrents',
+           'kat',
+           'morethan',
+           'pisexy',
+           'pretome',
+           'rarbg',
+           'scc',
+           'scenetime',
+           'shazbat',
+           'speedcd',
+           'strike',
+           'thepiratebay',
+           'torrentbytes',
+           'torrentday',
+           'torrenting',
+           'torrentleech',
+           'torrentshack',
+           'transmithe_net',
+           'tvchaosuk',
+           'nyaatorrents',
+           'tokyotoshokan',
+           ]


 def sortedProviderList():
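With the provider modules grouped and re-exported this way, other code keeps addressing providers by their module names. A minimal usage sketch, assuming a configured SickGear runtime (no new API is implied; sortedProviderList and is_active appear in the code above):

    import sickbeard.providers as providers

    # Every name listed in __all__ is importable as a submodule of sickbeard.providers:
    from sickbeard.providers import funfile, hdspace, tvchaosuk

    # and generic iteration over the configured provider objects stays the same:
    for prov in providers.sortedProviderList():
        print(prov.name, prov.is_active())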
@ -18,12 +18,12 @@
|
||||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import datetime
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from . import generic
|
from . import generic
|
||||||
from sickbeard import logger, tvcache, helpers
|
from sickbeard import logger, tvcache
|
||||||
from sickbeard.bs4_parser import BS4Parser
|
from sickbeard.bs4_parser import BS4Parser
|
||||||
|
from sickbeard.helpers import tryInt
|
||||||
from lib.unidecode import unidecode
|
from lib.unidecode import unidecode
|
||||||
|
|
||||||
|
|
||||||
|
@@ -36,52 +36,37 @@ class AlphaRatioProvider(generic.TorrentProvider):
         self.url_base = 'https://alpharatio.cc/'
         self.urls = {'config_provider_home_uri': self.url_base,
                      'login': self.url_base + 'login.php',
-                     'search': self.url_base + 'torrents.php?searchstr=%s'
-                               + '&tags_type=1&order_by=time&order_way=desc'
-                               + '&filter_cat[1]=1&filter_cat[2]=1&filter_cat[3]=1&filter_cat[4]=1&filter_cat[5]=1'
-                               + '&action=basic&searchsubmit=1',
+                     'search': self.url_base + 'torrents.php?searchstr=%s%s&' + '&'.join(
+                         ['tags_type=1', 'order_by=time', 'order_way=desc'] +
+                         ['filter_cat[%s]=1' % c for c in 1, 2, 3, 4, 5] +
+                         ['action=basic', 'searchsubmit=1']),
                      'get': self.url_base + '%s'}

         self.url = self.urls['config_provider_home_uri']

         self.username, self.password, self.minseed, self.minleech = 4 * [None]
+        self.freeleech = False
         self.cache = AlphaRatioCache(self)

-    def _do_login(self):
-
-        logged_in = lambda: 'session' in self.session.cookies
-        if logged_in():
-            return True
-
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password, 'keeplogged': '1', 'login': 'Login'}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate with %s, abort provider'
-            if response and 'Invalid Username/password' in response:
-                msg = u'Invalid username or password for %s. Check settings'
-            logger.log(msg % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _authorised(self, **kwargs):
+
+        return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
+                                                           post_params={'keeplogged': '1', 'login': 'Login'})
+
+    def _search_provider(self, search_params, **kwargs):

         results = []
-        if not self._do_login():
+        if not self._authorised():
             return results

-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'view', 'get': 'download'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['search'] % search_string
-
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['search'] % (search_string, ('', '&freetorrent=1')[self.freeleech])
+
                 html = self.get_url(search_url)

                 cnt = len(items[mode])

@@ -98,50 +83,44 @@ class AlphaRatioProvider(generic.TorrentProvider):

                     for tr in torrent_rows[1:]:
                         try:
-                            seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                            if mode != 'Cache' and (seeders < self.minseed or leechers < self.minleech):
+                            seeders, leechers, size = [tryInt(n, n) for n in [
+                                tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                            if self._peers_fail(mode, seeders, leechers):
                                 continue

                             title = tr.find('a', title=rc['info']).get_text().strip()

                             link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
                             download_url = self.urls['get'] % link
-                        except (AttributeError, TypeError):
+                        except (AttributeError, TypeError, ValueError):
                             continue

                         if title and download_url:
-                            items[mode].append((title, download_url, seeders))
+                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)

-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)

-            results += items[mode]
+            results = list(set(results + items[mode]))

         return results

-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)
-

 class AlphaRatioCache(tvcache.TVCache):

     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency

-    def _getRSSData(self):
+    def _cache_data(self):

-        return self.provider.get_cache_data()
+        return self.provider.cache_data()


 provider = AlphaRatioProvider()
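Note: the `'&'.join(...)` construction above replaces the long hand-concatenated query string. A quick standalone way to sanity-check what it produces (values copied from the diff; the snippet itself is only an illustration, not part of the change):

```python
# Illustration only: reproduces the query string that the refactored
# 'search' URL template in AlphaRatioProvider builds.
url_base = 'https://alpharatio.cc/'
search = url_base + 'torrents.php?searchstr=%s%s&' + '&'.join(
    ['tags_type=1', 'order_by=time', 'order_way=desc'] +
    ['filter_cat[%s]=1' % c for c in (1, 2, 3, 4, 5)] +
    ['action=basic', 'searchsubmit=1'])

# Two %s slots: the search term and an optional '&freetorrent=1' flag.
print(search % ('Show.Name.S01E01', '&freetorrent=1'))
```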
@@ -1,115 +0,0 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
-
-import datetime
-import urllib
-
-from . import generic
-from sickbeard import classes, show_name_helpers, logger, tvcache
-
-
-class AnimeNZBProvider(generic.NZBProvider):
-
-    def __init__(self):
-        generic.NZBProvider.__init__(self, 'AnimeNZB', anime_only=True)
-
-        self.url = 'http://animenzb.com/'
-
-        self.cache = AnimeNZBCache(self)
-
-    def _get_season_search_strings(self, ep_obj):
-
-        return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
-
-    def _do_search(self, search_string, search_mode='eponly', epcount=0, age=0):
-
-        results = []
-        if self.show and not self.show.is_anime:
-            return results
-
-        params = {'cat': 'anime',
-                  'q': search_string.encode('utf-8'),
-                  'max': '100'}
-
-        search_url = self.url + 'rss?' + urllib.urlencode(params)
-
-        logger.log(u'Search url: %s' % search_url, logger.DEBUG)
-
-        data = self.cache.getRSSFeed(search_url)
-        if data and 'entries' in data:
-
-            items = data.entries
-            for curItem in items:
-                (title, url) = self._get_title_and_url(curItem)
-
-                if title and url:
-                    results.append(curItem)
-                else:
-                    logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
-                               logger.DEBUG)
-
-        return results
-
-    def find_propers(self, date=None):
-
-        results = []
-        for item in self._do_search('v2|v3|v4|v5'):
-
-            (title, url) = self._get_title_and_url(item)
-
-            if 'published_parsed' in item and item['published_parsed']:
-                result_date = item.published_parsed
-                if result_date:
-                    result_date = datetime.datetime(*result_date[0:6])
-            else:
-                logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
-                continue
-
-            if not date or result_date > date:
-                search_result = classes.Proper(title, url, result_date, self.show)
-                results.append(search_result)
-
-        return results
-
-
-class AnimeNZBCache(tvcache.TVCache):
-
-    def __init__(self, this_provider):
-        tvcache.TVCache.__init__(self, this_provider)
-
-        self.minTime = 20  # cache update frequency
-
-    def _getRSSData(self):
-
-        params = {'cat': 'anime'.encode('utf-8'),
-                  'max': '100'.encode('utf-8')}
-
-        rss_url = self.provider.url + 'rss?' + urllib.urlencode(params)
-        logger.log(u'%s cache update URL: %s' % (self.provider.name, rss_url), logger.DEBUG)
-
-        data = self.getRSSFeed(rss_url)
-        if data and 'entries' in data:
-            return data.entries
-        return []
-
-
-provider = AnimeNZBProvider()
@@ -16,7 +16,6 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

 import re
-import datetime
 import time

 from . import generic
@@ -32,13 +31,12 @@ class BeyondHDProvider(generic.TorrentProvider):

         self.url_base = 'https://beyondhd.me/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'cache': self.url_base + 'api_tv.php?passkey=%s&cats=%s',
-                     'search': '&search=%s',
-                     }
+                     'browse': self.url_base + 'api_tv.php?passkey=%s&cats=%s',
+                     'search': '&search=%s'}

         self.categories = {'Season': '89',
-                           'Episode': '40,44,48,46,43,45',
-                           'Cache': '40,44,48,89,46,43,45'}
+                           'Episode': '40,44,48,43,45',
+                           'Cache': '40,44,48,89,43,45'}

         self.url = self.urls['config_provider_home_uri']

@@ -53,54 +51,54 @@ class BeyondHDProvider(generic.TorrentProvider):
             logger.log(u'Incorrect authentication credentials for %s : %s' % (self.name, data_json['error']), logger.DEBUG)
             raise AuthException('Authentication credentials for %s are incorrect, check your config' % self.name)

-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):

         results = []
         if not self._check_auth():
             return results

+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
         for mode in search_params.keys():
-            if 'Cache' != mode:
+            if mode in ['Season', 'Episode']:
                 show_type = self.show.air_by_date and 'Air By Date' \
                     or self.show.is_sports and 'Sports' or self.show.is_anime and 'Anime' or None
                 if show_type:
                     logger.log(u'Provider does not carry shows of type: [%s], skipping' % show_type, logger.DEBUG)
                     return results

+            mode_cats = (mode, 'Cache')['Propers' == mode]
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['cache'] % (self.passkey, self.categories[mode])
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['browse'] % (self.passkey, self.categories[mode_cats])
                 if 'Cache' != mode:
                     search_url += self.urls['search'] % re.sub('[\.\s]+', ' ', search_string)

                 data_json = self.get_url(search_url, json=True)

-                cnt = len(results)
+                cnt = len(items[mode])
                 if data_json and 'results' in data_json and self._check_auth_from_data(data_json):
                     for item in data_json['results']:

-                        seeders, leechers = item['seeders'], item['leechers']
-                        if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                        seeders, leechers = item.get('seeders', 0), item.get('leechers', 0)
+                        if self._peers_fail(mode, seeders, leechers):
                             continue
-                        title, download_url = item['file'], item['get']
+                        title, download_url = item.get('file'), item.get('get')
                         if title and download_url:
-                            results.append((title, download_url, seeders))
+                            items[mode].append((title, download_url, seeders, self._bytesizer(item.get('size'))))

-                self._log_result(mode, len(results) - cnt, search_url)
                 time.sleep(1.1)
-        # Sort items by seeders
-        results.sort(key=lambda tup: tup[2], reverse=True)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+            self._sort_seeders(mode, items)
+
+            results = list(set(results + items[mode]))

         return results

-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
+    def _episode_strings(self, ep_obj, **kwargs):

-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, scene=False, use_or=False)
+        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)

-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date, ['proper', 'repack'])
-

 class BeyondHDCache(tvcache.TVCache):

@@ -108,11 +106,9 @@ class BeyondHDCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.minTime = 10  # cache update frequency
-
-    def _getRSSData(self):
+    def _cache_data(self):

-        return self.provider.get_cache_data()
+        return self.provider.cache_data()


 provider = BeyondHDProvider()
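Note: several of these rewrites use the `(a, b)[condition]` tuple-index idiom instead of a conditional expression, e.g. `mode_cats = (mode, 'Cache')['Propers' == mode]` above. A short illustration of how it evaluates (values here are examples only):

```python
# (false_value, true_value)[condition] indexes the tuple with the bool,
# because False == 0 and True == 1 in Python.
mode = 'Propers'
mode_cats = (mode, 'Cache')['Propers' == mode]    # -> 'Cache'

freeleech_enabled = False
flag = ('', '&freetorrent=1')[freeleech_enabled]  # -> ''
```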
131  sickbeard/providers/bitmetv.py  (new file)
@@ -0,0 +1,131 @@
+# coding=utf-8
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+
+from . import generic
+from sickbeard import logger, tvcache
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
+from lib.unidecode import unidecode
+
+
+class BitmetvProvider(generic.TorrentProvider):
+
+    def __init__(self):
+        generic.TorrentProvider.__init__(self, 'BitMeTV')
+
+        self.url_base = 'http://www.bitmetv.org/'
+
+        self.urls = {'config_provider_home_uri': self.url_base,
+                     'login': self.url_base + 'links.php',
+                     'search': self.url_base + 'browse.php?%s&search=%s',
+                     'get': self.url_base + '%s'}
+
+        self.categories = {'shows': 0, 'anime': 86}  # exclusively one cat per key
+
+        self.url = self.urls['config_provider_home_uri']
+
+        self.digest, self.minseed, self.minleech = 3 * [None]
+        self.cache = BitmetvCache(self)
+
+    def _authorised(self, **kwargs):
+
+        return super(BitmetvProvider, self)._authorised(
+            logged_in=(lambda x=None: (None is x or 'Other Links' in x) and self.has_all_cookies() and
+                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
+            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+
+    def _search_provider(self, search_params, **kwargs):
+
+        results = []
+        if not self._authorised():
+            return results
+
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
+        for mode in search_params.keys():
+            for search_string in search_params[mode]:
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                category = 'cat=%s' % self.categories[
+                    (mode in ['Season', 'Episode'] and self.show and self.show.is_anime) and 'anime' or 'shows']
+                search_url = self.urls['search'] % (category, search_string)
+
+                html = self.get_url(search_url)
+
+                cnt = len(items[mode])
+                try:
+                    if not html or self._has_no_results(html):
+                        raise generic.HaltParseException
+
+                    with BS4Parser(html, features=['html5lib', 'permissive'], attr='cellpadding="5"') as soup:
+                        torrent_table = soup.find('table', attrs={'cellpadding': 5})
+                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
+
+                        if 2 > len(torrent_rows):
+                            raise generic.HaltParseException
+
+                        for tr in torrent_rows[1:]:
+                            try:
+                                seeders, leechers, size = [tryInt(n, n) for n in [
+                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+                                if self._peers_fail(mode, seeders, leechers):
+                                    continue
+
+                                info = tr.find('a', href=rc['info'])
+                                title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
+                                download_url = self.urls['get'] % tr.find('a', href=rc['get']).get('href')
+                            except (AttributeError, TypeError, ValueError):
+                                continue
+
+                            if title and download_url:
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+
+                except generic.HaltParseException:
+                    pass
+                except Exception:
+                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+            self._sort_seeders(mode, items)
+
+            results = list(set(results + items[mode]))
+
+        return results
+
+    @staticmethod
+    def ui_string(key):
+
+        return 'bitmetv_digest' == key and 'use... \'uid=xx; pass=yy\'' or ''
+
+
+class BitmetvCache(tvcache.TVCache):
+
+    def __init__(self, this_provider):
+        tvcache.TVCache.__init__(self, this_provider)
+
+        self.update_freq = 7  # cache update frequency
+
+    def _cache_data(self):
+
+        return self.provider.cache_data()
+
+
+provider = BitmetvProvider()
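Note: BitMeTV is configured with a cookie "digest" rather than a username/password pair; `_authorised` above only checks that the `uid` and `pass` cookie values appear in that digest string. A minimal sketch of turning a digest such as `'uid=xx; pass=yy'` into session cookies, assuming the generic provider does something broadly equivalent (the parsing helper and values below are illustrative, not lifted from SickGear):

```python
# Illustrative only: split a cookie digest like 'uid=xx; pass=yy'
# into name/value pairs and attach them to a requests session.
import requests

def apply_digest(session, digest):
    for pair in digest.split(';'):
        if '=' in pair:
            name, _, value = pair.strip().partition('=')
            session.cookies.set(name, value, domain='.bitmetv.org')

session = requests.Session()
apply_digest(session, 'uid=12345; pass=abcdef')  # placeholder values
```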
@@ -16,12 +16,12 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

 import re
-import datetime
 import traceback

 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode

@@ -33,51 +33,30 @@ class BitSoupProvider(generic.TorrentProvider):
         self.url_base = 'https://www.bitsoup.me/'
         self.urls = {'config_provider_home_uri': self.url_base,
                      'login': self.url_base + 'takelogin.php',
-                     'search': self.url_base + 'browse.php?search=%s%s',
+                     'search': self.url_base + 'browse.php?search=%s&%s&incldead=0&blah=0',
                      'get': self.url_base + '%s'}

-        self.categories = '&c42=1&c45=1&c49=1&c7=1&incldead=0&blah=0'
+        self.categories = {'shows': [42, 45, 49, 32, 7], 'anime': [23]}

         self.url = self.urls['config_provider_home_uri']

         self.username, self.password, self.minseed, self.minleech = 4 * [None]
         self.cache = BitSoupCache(self)

-    def _do_login(self):
-
-        logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
-        if logged_in():
-            return True
-
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password, 'ssl': 'yes'}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate with %s, abort provider'
-            if response and 'Username or password incorrect' in response:
-                msg = u'Invalid username or password for %s. Check settings'
-            logger.log(msg % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):

         results = []
-        if not self._do_login():
+        if not self._authorised():
            return results

-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['search'] % (search_string, self.categories)
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['search'] % (search_string, self._categories_string(mode))

                 html = self.get_url(search_url)

                 cnt = len(items[mode])

@@ -85,7 +64,7 @@ class BitSoupProvider(generic.TorrentProvider):
                     if not html or self._has_no_results(html):
                         raise generic.HaltParseException

-                    with BS4Parser(html, 'html.parser') as soup:
+                    with BS4Parser(html, 'html.parser', attr='class="koptekst"') as soup:
                         torrent_table = soup.find('table', attrs={'class': 'koptekst'})
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

@@ -94,40 +73,36 @@ class BitSoupProvider(generic.TorrentProvider):

                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-3, -2)]
-                                if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                                seeders, leechers, size = [tryInt(n, n) for n in [
+                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+                                if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', href=rc['info'])
                                 title = info.get_text().strip()

                                 download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                            except (AttributeError, TypeError):
+                            except (AttributeError, TypeError, ValueError):
                                 continue

                             if title and download_url:
-                                items[mode].append((title, download_url, seeders))
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)

-            # for each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)

-            results += items[mode]
+            results = list(set(results + items[mode]))

         return results

-    def find_propers(self, search_date=datetime.datetime.today()):
+    def _episode_strings(self, ep_obj, **kwargs):

-        return self._find_propers(search_date)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='|', use_or=False)
+        return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)


 class BitSoupCache(tvcache.TVCache):

@@ -135,11 +110,11 @@ class BitSoupCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency

-    def _getRSSData(self):
+    def _cache_data(self):

-        return self.provider.get_cache_data()
+        return self.provider.cache_data()


 provider = BitSoupProvider()
@@ -15,16 +15,19 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-import datetime
-import time
 import math
-import socket
+import re
+import time

 from . import generic
-from sickbeard import classes, scene_exceptions, logger, tvcache
-from sickbeard.helpers import sanitizeSceneName
-from sickbeard.exceptions import ex, AuthException
-from lib import jsonrpclib
+from sickbeard import helpers, logger, scene_exceptions, tvcache
+from sickbeard.helpers import tryInt
+try:
+    import json
+except ImportError:
+    from lib import simplejson as json
+import random


 class BTNProvider(generic.TorrentProvider):
@@ -33,110 +36,101 @@ class BTNProvider(generic.TorrentProvider):
         generic.TorrentProvider.__init__(self, 'BTN')

         self.url_base = 'https://broadcasthe.net'
-        self.url_api = 'http://api.btnapps.net'
+        self.url_api = 'https://api.btnapps.net'

+        self.proper_search_terms = ['%.proper.%', '%.repack.%']
         self.url = self.url_base

-        self.api_key = None
+        self.api_key, self.minseed, self.minleech = 3 * [None]
+        self.reject_m2ts = False
+        self.session.headers = {'Content-Type': 'application/json-rpc'}
         self.cache = BTNCache(self)

-    def _check_auth_from_data(self, data_json):
-
-        if data_json is None:
-            return self._check_auth()
-
-        if 'api-error' not in data_json:
-            return True
-
-        logger.log(u'Incorrect authentication credentials for %s : %s' % (self.name, data_json['api-error']),
-                   logger.DEBUG)
-        raise AuthException('Your authentication credentials for %s are incorrect, check your config.' % self.name)
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _authorised(self, **kwargs):
+
+        return self._check_auth()
+
+    def _search_provider(self, search_params, age=0, **kwargs):

         self._check_auth()

-        params = {}
-
-        if search_params:
-            params.update(search_params)
-
-        if age:
-            params['age'] = '<=%i' % age  # age in seconds
-
         results = []

-        data_json = self._api_call(params)
-        if not (data_json and self._check_auth_from_data(data_json)):
-            self._log_result('rpc search', 0, self.name)
-        else:
-
-            found_torrents = {} if 'torrents' not in data_json else data_json['torrents']
-
-            # We got something, we know the API sends max 1000 results at a time.
-            # See if there are more than 1000 results for our query, if not we
-            # keep requesting until we've got everything.
-            # max 150 requests per hour so limit at that. Scan every 15 minutes. 60 / 15 = 4.
-            max_pages = 150
-            results_per_page = 1000
-
-            if 'results' in data_json and int(data_json['results']) >= results_per_page:
-                pages_needed = int(math.ceil(int(data_json['results']) / results_per_page))
-                if pages_needed > max_pages:
-                    pages_needed = max_pages
-
-                # +1 because range(1,4) = 1, 2, 3
-                for page in range(1, pages_needed + 1):
-                    data_json = self._api_call(params, results_per_page, page * results_per_page)
-                    # Note that this these are individual requests and might time out individually. This would result in 'gaps'
-                    # in the results. There is no way to fix this though.
-                    if 'torrents' in data_json:
-                        found_torrents.update(data_json['torrents'])
-
-            cnt = len(results)
-            for torrentid, torrent_info in found_torrents.iteritems():
-                title, url = self._get_title_and_url(torrent_info)
-                if title and url:
-                    results.append(torrent_info)
-            self._log_result('search', len(results) - cnt, self.name + ' JSON-RPC API')
+        for mode in search_params.keys():
+            for search_param in search_params[mode]:
+
+                params = {}
+                if 'Propers' == mode:
+                    params.update({'release': search_param})
+                    age = 4 * 24 * 60 * 60
+                else:
+                    search_param and params.update(search_param)
+                age and params.update(dict(age='<=%i' % age))  # age in seconds
+
+                json_rpc = (lambda param_dct, items_per_page=1000, offset=0:
+                            '{"jsonrpc": "2.0", "id": "%s", "method": "getTorrents", "params": ["%s", %s, %s, %s]}' %
+                            (''.join(random.sample('abcdefghijklmnopqrstuvwxyz0123456789', 8)),
+                             self.api_key, json.dumps(param_dct), items_per_page, offset))
+
+                try:
+                    response = helpers.getURL(self.url_api, post_data=json_rpc(params), session=self.session, json=True)
+                    error_text = response['error']['message']
+                    logger.log(('Call Limit' in error_text and u'Action aborted because the %(prov)s 150 calls/hr limit was reached' or
+                                u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
+                    return results
+                except:
+                    data_json = response and 'result' in response and response['result'] or {}
+
+                if data_json:
+
+                    found_torrents = {} if 'torrents' not in data_json else data_json['torrents']
+
+                    # We got something, we know the API sends max 1000 results at a time.
+                    # See if there are more than 1000 results for our query, if not we
+                    # keep requesting until we've got everything.
+                    # max 150 requests per hour so limit at that. Scan every 15 minutes. 60 / 15 = 4.
+                    max_pages = 5  # 150 was the old value and impractical
+                    results_per_page = 1000
+
+                    if 'results' in data_json and int(data_json['results']) >= results_per_page:
+                        pages_needed = int(math.ceil(int(data_json['results']) / results_per_page))
+                        if pages_needed > max_pages:
+                            pages_needed = max_pages
+
+                        # +1 because range(1,4) = 1, 2, 3
+                        for page in range(1, pages_needed + 1):
+
+                            try:
+                                response = helpers.getURL(self.url_api, json=True, session=self.session,
+                                                          post_data=json_rpc(params, results_per_page, page * results_per_page))
+                                error_text = response['error']['message']
+                                logger.log(('Call Limit' in error_text and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached' or
+                                            u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
+                                return results
+                            except:
+                                data_json = response and 'result' in response and response['result'] or {}
+
+                            # Note that this these are individual requests and might time out individually. This would result in 'gaps'
+                            # in the results. There is no way to fix this though.
+                            if 'torrents' in data_json:
+                                found_torrents.update(data_json['torrents'])
+
+                    cnt = len(results)
+                    for torrentid, torrent_info in found_torrents.iteritems():
+                        seeders, leechers = [tryInt(n) for n in torrent_info.get('Seeders'), torrent_info.get('Leechers')]
+                        if self._peers_fail(mode, seeders, leechers) or \
+                                self.reject_m2ts and re.match(r'(?i)m2?ts', torrent_info.get('Container', '')):
+                            continue
+
+                        title, url = self._title_and_url(torrent_info)
+                        if title and url:
+                            results.append(torrent_info)
+
+                    self._log_search(mode, len(results) - cnt, self.name)

         return results

-    def _api_call(self, params=None, results_per_page=1000, offset=0):
-
-        if None is params:
-            params = {}
-
-        logger.log(u'Searching with parameters: ' + str(params), logger.DEBUG)
-
-        parsed_json = {}
-        server = jsonrpclib.Server(self.url_api)
-        try:
-            parsed_json = server.getTorrents(self.api_key, params, int(results_per_page), int(offset))
-
-        except jsonrpclib.jsonrpc.ProtocolError as error:
-            if 'Call Limit' in error.message:
-                logger.log(u'Request ignored because the %s 150 calls/hr limit was reached' % self.name, logger.WARNING)
-            else:
-                logger.log(u'JSON-RPC protocol error while accessing %s: %s' % (self.name, ex(error)), logger.ERROR)
-            return {'api-error': ex(error)}
-
-        except socket.timeout:
-            logger.log(u'Timeout while accessing ' + self.name, logger.WARNING)
-
-        except socket.error as error:
-            # timeouts are sometimes thrown as socket errors
-            logger.log(u'Socket error while accessing %s: %s' % (self.name, error[1]), logger.ERROR)
-
-        except Exception as error:
-            errorstring = str(error)
-            if errorstring.startswith('<') and errorstring.endswith('>'):
-                errorstring = errorstring[1:-1]
-            logger.log(u'Error while accessing %s: %s' % (self.name, errorstring), logger.ERROR)
-
-        return parsed_json
-
-    def _get_title_and_url(self, data_json):
+    def _title_and_url(self, data_json):

         # The BTN API gives a lot of information in response,
         # however SickGear is built mostly around Scene or
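Note: the refactor drops the `jsonrpclib` dependency and builds the JSON-RPC 2.0 request body by hand via the `json_rpc` lambda above. A standalone sketch of the payload it produces; the API key value is a placeholder and the wrapper function is only for illustration:

```python
# Sketch: the request body that BTNProvider's json_rpc lambda assembles.
# 'MY-API-KEY' is a placeholder, not a real key.
import json
import random

def json_rpc(api_key, param_dct, items_per_page=1000, offset=0):
    return ('{"jsonrpc": "2.0", "id": "%s", "method": "getTorrents", "params": ["%s", %s, %s, %s]}'
            % (''.join(random.sample('abcdefghijklmnopqrstuvwxyz0123456789', 8)),
               api_key, json.dumps(param_dct), items_per_page, offset))

print(json_rpc('MY-API-KEY', {'category': 'Episode', 'name': 'S01E01'}))
# -> {"jsonrpc": "2.0", "id": "<random id>", "method": "getTorrents",
#     "params": ["MY-API-KEY", {"category": "Episode", "name": "S01E01"}, 1000, 0]}
```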
@@ -165,45 +159,44 @@ class BTNProvider(generic.TorrentProvider):

         return title, url

-    def _get_season_search_strings(self, ep_obj, **kwargs):
+    def _season_strings(self, ep_obj, **kwargs):

         search_params = []
-        current_params = {'category': 'Season'}
+        base_params = {'category': 'Season'}

         # Search for entire seasons: no need to do special things for air by date or sports shows
         if ep_obj.show.air_by_date or ep_obj.show.is_sports:
             # Search for the year of the air by date show
-            current_params['name'] = str(ep_obj.airdate).split('-')[0]
+            base_params['name'] = str(ep_obj.airdate).split('-')[0]
         elif ep_obj.show.is_anime:
-            current_params['name'] = '%s' % ep_obj.scene_absolute_number
+            base_params['name'] = '%s' % ep_obj.scene_absolute_number
         else:
-            current_params['name'] = 'Season %s' % (ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)]
+            base_params['name'] = 'Season %s' % (ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)]

-        # search
         if 1 == ep_obj.show.indexer:
-            current_params['tvdb'] = ep_obj.show.indexerid
-            search_params.append(current_params)
-        elif 2 == ep_obj.show.indexer:
-            current_params['tvrage'] = ep_obj.show.indexerid
-            search_params.append(current_params)
+            base_params['tvdb'] = ep_obj.show.indexerid
+            search_params.append(base_params)
+        # elif 2 == ep_obj.show.indexer:
+        #     current_params['tvrage'] = ep_obj.show.indexerid
+        #     search_params.append(current_params)
         else:
             name_exceptions = list(
-                set([sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
+                set([helpers.sanitizeSceneName(a) for a in
+                     scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
             for name in name_exceptions:
-                # Search by name if we don't have tvdb or tvrage id
-                cur_return = current_params.copy()
-                cur_return['series'] = name
-                search_params.append(cur_return)
+                series_param = {'series': name}
+                series_param.update(base_params)
+                search_params.append(series_param)

-        return search_params
+        return [dict({'Season': search_params})]

-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
+    def _episode_strings(self, ep_obj, **kwargs):

         if not ep_obj:
             return [{}]

-        to_return = []
-        search_params = {'category': 'Episode'}
+        search_params = []
+        base_params = {'category': 'Episode'}

         # episode
         if ep_obj.show.air_by_date or ep_obj.show.is_sports:
@@ -211,54 +204,35 @@ class BTNProvider(generic.TorrentProvider):

             # BTN uses dots in dates, we just search for the date since that
             # combined with the series identifier should result in just one episode
-            search_params['name'] = date_str.replace('-', '.')
+            base_params['name'] = date_str.replace('-', '.')
         elif ep_obj.show.is_anime:
-            search_params['name'] = '%s' % ep_obj.scene_absolute_number
+            base_params['name'] = '%s' % ep_obj.scene_absolute_number
         else:
             # Do a general name search for the episode, formatted like SXXEYY
             season, episode = ((ep_obj.season, ep_obj.episode),
                                (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
-            search_params['name'] = 'S%02dE%02d' % (season, episode)
+            base_params['name'] = 'S%02dE%02d' % (season, episode)

         # search
         if 1 == ep_obj.show.indexer:
-            search_params['tvdb'] = ep_obj.show.indexerid
-            to_return.append(search_params)
-        elif 2 == ep_obj.show.indexer:
-            search_params['tvrage'] = ep_obj.show.indexerid
-            to_return.append(search_params)
+            base_params['tvdb'] = ep_obj.show.indexerid
+            search_params.append(base_params)
+        # elif 2 == ep_obj.show.indexer:
+        #     search_params['tvrage'] = ep_obj.show.indexerid
+        #     to_return.append(search_params)
         else:
             # add new query string for every exception
             name_exceptions = list(
-                set([sanitizeSceneName(a) for a in scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
-            for cur_exception in name_exceptions:
-                cur_return = search_params.copy()
-                cur_return['series'] = cur_exception
-                to_return.append(cur_return)
+                set([helpers.sanitizeSceneName(a) for a in
+                     scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
+            for name in name_exceptions:
+                series_param = {'series': name}
+                series_param.update(base_params)
+                search_params.append(series_param)

-        return to_return
+        return [dict({'Episode': search_params})]

-    def find_propers(self, search_date=None):
-
-        results = []
-
-        search_terms = ['%.proper.%', '%.repack.%']
-
-        for term in search_terms:
-            for item in self._do_search({'release': term}, age=4 * 24 * 60 * 60):
-                if item['Time']:
-                    try:
-                        result_date = datetime.datetime.fromtimestamp(float(item['Time']))
-                    except TypeError:
-                        continue
-
-                    if not search_date or result_date > search_date:
-                        title, url = self._get_title_and_url(item)
-                        results.append(classes.Proper(title, url, result_date, self.show))
-
-        return results
-
-    def get_cache_data(self, **kwargs):
+    def cache_data(self, **kwargs):

         # Get the torrents uploaded since last check.
         seconds_since_last_update = int(math.ceil(time.time() - time.mktime(kwargs['age'])))
@@ -275,7 +249,7 @@ class BTNProvider(generic.TorrentProvider):
                        % self.name, logger.WARNING)
             seconds_since_last_update = 86400

-        return self._do_search(search_params=None, age=seconds_since_last_update)
+        return self._search_provider(dict({'Cache': ['']}), age=seconds_since_last_update)


 class BTNCache(tvcache.TVCache):

@@ -283,11 +257,11 @@ class BTNCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.minTime = 15  # cache update frequency
+        self.update_freq = 15  # cache update frequency

-    def _getRSSData(self):
+    def _cache_data(self):

-        return self.provider.get_cache_data(age=self._getLastUpdate().timetuple(), min_time=self.minTime)
+        return self.provider.cache_data(age=self._getLastUpdate().timetuple(), min_time=self.update_freq)


 provider = BTNProvider()
137  sickbeard/providers/freshontv.py  (new file)
@@ -0,0 +1,137 @@
+# coding=utf-8
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+
+from . import generic
+from sickbeard import logger, tvcache
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
+from lib.unidecode import unidecode
+
+
+class FreshOnTVProvider(generic.TorrentProvider):
+
+    def __init__(self):
+        generic.TorrentProvider.__init__(self, 'FreshOnTV')
+
+        self.url_base = 'https://freshon.tv/'
+        self.urls = {'config_provider_home_uri': self.url_base,
+                     'login': self.url_base + 'login.php?action=makelogin',
+                     'search': self.url_base + 'browse.php?incldead=%s&words=0&cat=0&search=%s',
+                     'get': self.url_base + '%s'}
+
+        self.url = self.urls['config_provider_home_uri']
+
+        self.username, self.password, self.minseed, self.minleech = 4 * [None]
+        self.freeleech = False
+        self.cache = FreshOnTVCache(self)
+
+    def _authorised(self, **kwargs):
+
+        return super(FreshOnTVProvider, self)._authorised(
+            post_params={'login': 'Do it!'},
+            failed_msg=(lambda x=None: 'DDoS protection by CloudFlare' in x and
+                        u'Unable to login to %s due to CloudFlare DDoS javascript check' or
+                        'Username does not exist' in x and
+                        u'Invalid username or password for %s. Check settings' or
+                        u'Failed to authenticate or parse a response from %s, abort provider'))
+
+    def _search_provider(self, search_params, **kwargs):
+
+        results = []
+        if not self._authorised():
+            return results
+
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+        freeleech = (0, 3)[self.freeleech]
+
+        rc = dict((k, re.compile('(?i)' + v))
+                  for (k, v) in {'info': 'detail', 'get': 'download', 'name': '_name'}.items())
+        for mode in search_params.keys():
+            for search_string in search_params[mode]:
+
+                search_string, search_url = self._title_and_url((
+                    isinstance(search_string, unicode) and unidecode(search_string) or search_string,
+                    self.urls['search'] % (freeleech, search_string)))
+
+                # returns top 15 results by default, expandable in user profile to 100
+                html = self.get_url(search_url)
+
+                cnt = len(items[mode])
+                try:
+                    if not html or self._has_no_results(html):
+                        raise generic.HaltParseException
+
+                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+                        torrent_table = soup.find('table', attrs={'class': 'frame'})
+                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
+
+                        if 2 > len(torrent_rows):
+                            raise generic.HaltParseException
+
+                        for tr in torrent_rows[1:]:
+                            try:
+                                if tr.find('img', alt='Nuked'):
+                                    continue
+
+                                seeders, leechers, size = [tryInt(n, n) for n in [
+                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+                                if self._peers_fail(mode, seeders, leechers):
+                                    continue
+
+                                info = tr.find('a', href=rc['info'], attrs={'class': rc['name']})
+                                title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
+
+                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                            except (AttributeError, TypeError, ValueError):
+                                continue
+
+                            if title and download_url:
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+
+                except generic.HaltParseException:
+                    pass
+                except Exception:
+                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+            self._sort_seeders(mode, items)
+
+            results = list(set(results + items[mode]))
+
+        return results
+
+    def _get_episode_search_strings(self, ep_obj, **kwargs):
+
+        return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
+
+
+class FreshOnTVCache(tvcache.TVCache):
+
+    def __init__(self, this_provider):
+        tvcache.TVCache.__init__(self, this_provider)
+
+        self.update_freq = 20
+
+    def _cache_data(self):
+
+        return self.provider.cache_data()
+
+
+provider = FreshOnTVProvider()
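Note: these parsers call `tryInt(n, n)` so that numeric columns (seeders, leechers) come back as ints while a size string such as '1.4 GB' passes through unchanged for `_bytesizer`. A sketch of the assumed helper behaviour; the real implementation lives in sickbeard.helpers, this is only an approximation:

```python
# Approximation of the tryInt(value, default) helper as used above:
# returns int(value) when possible, otherwise the supplied default.
def try_int(value, default=0):
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

cells = ['12', '3', '1.4 GB']
seeders, leechers, size = [try_int(n, n) for n in cells]
# seeders == 12, leechers == 3, size == '1.4 GB'
```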
@ -16,12 +16,12 @@
|
||||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import datetime
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from . import generic
|
from . import generic
|
||||||
from sickbeard import logger, tvcache, helpers
|
from sickbeard import logger, tvcache
|
||||||
from sickbeard.bs4_parser import BS4Parser
|
from sickbeard.bs4_parser import BS4Parser
|
||||||
|
from sickbeard.helpers import tryInt
|
||||||
from lib.unidecode import unidecode
|
from lib.unidecode import unidecode
|
||||||
|
|
||||||
|
|
||||||
|
@@ -33,51 +33,39 @@ class FunFileProvider(generic.TorrentProvider):

         self.url_base = 'https://www.funfile.org/'
         self.urls = {'config_provider_home_uri': self.url_base,
                      'login': self.url_base + 'takelogin.php',
-                     'search': self.url_base + 'browse.php?%ssearch=%s',
+                     'search': self.url_base + 'browse.php?%s&search=%s&incldead=0&showspam=1&',
                      'get': self.url_base + '%s'}

-        self.categories = 'cat=7&incldead=0&s_title=1&showspam=1&'
+        self.categories = {'shows': [7], 'anime': [44]}

         self.url = self.urls['config_provider_home_uri']
         self.url_timeout = 90
         self.username, self.password, self.minseed, self.minleech = 4 * [None]
         self.cache = FunFileCache(self)

-    def _do_login(self):
-
-        logged_in = lambda: None is not self.session.cookies.get('uid', domain='.funfile.org') and None is not self.session.cookies.get('pass', domain='.funfile.org')
-        if logged_in():
-            return True
-
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password, 'submit': 'Log in'}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session, timeout=self.url_timeout)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate with %s, abort provider'
-            if response and 'Username or password incorrect' in response:
-                msg = u'Invalid username or password for %s. Check settings'
-            logger.log(msg % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _authorised(self, **kwargs):
+
+        return super(FunFileProvider, self)._authorised(
+            logged_in=(lambda x=None: None is not self.session.cookies.get('uid', domain='.funfile.org') and
+                       None is not self.session.cookies.get('pass', domain='.funfile.org')),
+            post_params={'login': 'Login', 'returnto': '/'}, timeout=self.url_timeout)
+
+    def _search_provider(self, search_params, **kwargs):

         results = []
-        if not self._do_login():
+        if not self._authorised():
             return results

-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download',
-                                                             'cats': 'cat=(?:7)'}.items())
+                                                             'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|')
+                                                             }.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['search'] % (self.categories, search_string)
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['search'] % (self._categories_string(mode), search_string)
                 html = self.get_url(search_url, timeout=self.url_timeout)

                 cnt = len(items[mode])
@@ -98,53 +86,44 @@ class FunFileProvider(generic.TorrentProvider):
                             if not info:
                                 continue

-                            seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                            if None is tr.find('a', href=rc['cats'])\
-                                    or ('Cache' != mode and (seeders < self.minseed or leechers < self.minleech)):
+                            seeders, leechers, size = [tryInt(n, n) for n in [
+                                (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+                            if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
                                 continue

                             title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
                             download_url = self.urls['get'] % tr.find('a', href=rc['get']).get('href')

-                        except (AttributeError, TypeError):
+                        except (AttributeError, TypeError, ValueError):
                             continue

                         if title and download_url:
-                            items[mode].append((title, download_url, seeders))
+                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                 except (generic.HaltParseException, AttributeError):
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)

-            # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)

-            results += items[mode]
+            results = list(set(results + items[mode]))

         return results

-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)
-

 class FunFileCache(tvcache.TVCache):

     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.minTime = 15  # cache update frequency
+        self.update_freq = 15  # cache update frequency

-    def _getRSSData(self):
+    def _cache_data(self):

-        return self.provider.get_cache_data()
+        return self.provider.cache_data()


 provider = FunFileProvider()

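Note on the FunFile change above: the hard-coded category query string becomes a categories dict, and the query/regex fragments are built by the _categories_string helper added to the generic provider later in this changeset. A minimal standalone sketch of that helper's default behaviour (assuming its default template 'c%s=1' and '&' delimiter, and dropping the mode/anime detection that the real method performs):

# Hedged sketch only: a simplified stand-in for generic._categories_string, for illustration.
def categories_string(categories, include_anime=False, template='c%s=1', delimiter='&'):
    cats = sorted(categories['shows'] + (categories.get('anime', []) if include_anime else []))
    return delimiter.join([(template or '%s') % c for c in cats])

funfile_cats = {'shows': [7], 'anime': [44]}
print(categories_string(funfile_cats))                               # c7=1
print(categories_string(funfile_cats, include_anime=True))           # c7=1&c44=1
print(categories_string(funfile_cats, template='', delimiter='|'))   # 7  -> feeds the 'cats' regex above
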
@@ -19,24 +19,26 @@
 from __future__ import with_statement

-import time
 import datetime
+import itertools
+import math
 import os
 import re
-import itertools
+import time
 from base64 import b16encode, b32decode

 import sickbeard
 import requests
 import requests.cookies
+from hachoir_parser import guessParser
+from hachoir_core.stream import FileInputStream

 from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
 from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
 from sickbeard.exceptions import SickBeardException, AuthException, ex
 from sickbeard.helpers import maybe_plural, _remove_file_failed as remove_file_failed
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from sickbeard.show_name_helpers import allPossibleShowNames
-from hachoir_parser import guessParser
-from hachoir_core.stream import FileInputStream


 class HaltParseException(SickBeardException):
@@ -53,6 +55,8 @@ class GenericProvider:
         self.name = name
         self.supportsBacklog = supports_backlog
         self.anime_only = anime_only
+        if anime_only:
+            self.proper_search_terms = 'v1|v2|v3|v4|v5'
         self.url = ''

         self.show = None
@@ -62,6 +66,7 @@ class GenericProvider:
         self.enabled = False
         self.enable_recentsearch = False
         self.enable_backlog = False
+        self.categories = None

         self.cache = tvcache.TVCache(self)

@@ -89,10 +94,10 @@ class GenericProvider:

         return '%s.png' % ('newznab', default_name[0])[any(default_name)]

-    def _check_auth(self):
+    def _authorised(self):
         return True

-    def _do_login(self):
+    def _check_auth(self):
         return True

     def is_active(self):
@@ -133,7 +138,7 @@ class GenericProvider:
         """

         # check for auth
-        if not self._do_login():
+        if not self._authorised():
             return

         return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
@@ -145,7 +150,7 @@ class GenericProvider:
         """

         # check for auth
-        if not self._do_login():
+        if not self._authorised():
             return False

         if GenericProvider.TORRENT == self.providerType:
@@ -239,25 +244,25 @@ class GenericProvider:
     def get_quality(self, item, anime=False):
         """
         Figures out the quality of the given RSS item node

         item: An elementtree.ElementTree element representing the <item> tag of the RSS feed

         Returns a Quality value obtained from the node's data
         """
-        (title, url) = self._get_title_and_url(item)  # @UnusedVariable
+        (title, url) = self._title_and_url(item)  # @UnusedVariable
         quality = Quality.sceneQuality(title, anime)
         return quality

-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0):
         return []

-    def _get_season_search_strings(self, episode):
+    def _season_strings(self, episode):
         return []

-    def _get_episode_search_strings(self, *args, **kwargs):
+    def _episode_strings(self, *args, **kwargs):
         return []

-    def _get_title_and_url(self, item):
+    def _title_and_url(self, item):
         """
         Retrieves the title and URL data from the item

@@ -267,25 +272,14 @@ class GenericProvider:
         """

         title, url = None, None

         try:
-            if isinstance(item, tuple):
-                title = item[0]
-                url = item[1]
-            else:
-                if 'title' in item:
-                    title = item.title
-
-                if 'link' in item:
-                    url = item.link
+            title, url = isinstance(item, tuple) and (item[0], item[1]) or \
+                (item.get('title', None), item.get('link', None))
         except Exception:
             pass

-        if title:
-            title = re.sub(r'\s+', '.', u'%s' % title)
-
-        if url:
-            url = str(url).replace('&amp;', '&')
+        title = title and re.sub(r'\s+', '.', u'%s' % title)
+        url = url and str(url).replace('&amp;', '&')

         return title, url

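The hunk above collapses the tuple/feed-entry branches of _title_and_url into one expression. A hedged, self-contained sketch of that condensed behaviour, assuming feed entries remain dict-like with 'title'/'link' keys (the sample item and URL are illustrative only):

# Hedged sketch of the condensed _title_and_url expression; not the library code itself.
import re

def title_and_url(item):
    title, url = None, None
    try:
        # a (title, url) tuple is used as-is; anything else is treated as a dict-like feed entry
        title, url = isinstance(item, tuple) and (item[0], item[1]) or \
            (item.get('title', None), item.get('link', None))
    except Exception:
        pass
    title = title and re.sub(r'\s+', '.', u'%s' % title)     # spaces become dots
    url = url and str(url).replace('&amp;', '&')             # undo HTML entity escaping
    return title, url

print(title_and_url(('Show S01E02 720p', 'http://example.net/dl?a=1&amp;b=2')))
# ('Show.S01E02.720p', 'http://example.net/dl?a=1&b=2')
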
@@ -310,21 +304,21 @@ class GenericProvider:
                     # found result, search next episode
                     continue

-                # skip if season already searched
-                if 1 < len(episodes) and ep_obj.scene_season == searched_scene_season:
-                    continue
-
-                # mark season searched for season pack searches so we can skip later on
-                searched_scene_season = ep_obj.scene_season
-
                 if 'sponly' == search_mode:
-                    # get season search results
-                    for curString in self._get_season_search_strings(ep_obj):
-                        item_list += self._do_search(curString, search_mode, len(episodes))
+                    # skip if season already searched
+                    if 1 < len(episodes) and searched_scene_season == ep_obj.scene_season:
+                        continue
+
+                    searched_scene_season = ep_obj.scene_season
+
+                    # get season search params
+                    search_params = self._season_strings(ep_obj)
                 else:
-                    # get single episode search results
-                    for curString in self._get_episode_search_strings(ep_obj):
-                        item_list += self._do_search(curString, 'eponly', len(episodes))
+                    # get single episode search params
+                    search_params = self._episode_strings(ep_obj)
+
+                for cur_param in search_params:
+                    item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes))

                 # if we found what we needed already from cache then return results and exit
                 if len(results) == len(episodes):
@ -349,18 +343,18 @@ class GenericProvider:
|
||||||
|
|
||||||
# filter results
|
# filter results
|
||||||
cl = []
|
cl = []
|
||||||
|
parser = NameParser(False, convert=True)
|
||||||
for item in item_list:
|
for item in item_list:
|
||||||
(title, url) = self._get_title_and_url(item)
|
(title, url) = self._title_and_url(item)
|
||||||
|
|
||||||
# parse the file name
|
# parse the file name
|
||||||
try:
|
try:
|
||||||
parser = NameParser(False, convert=True)
|
|
||||||
parse_result = parser.parse(title)
|
parse_result = parser.parse(title)
|
||||||
except InvalidNameException:
|
except InvalidNameException:
|
||||||
logger.log(u'Unable to parse the filename ' + title + ' into a valid episode', logger.DEBUG)
|
logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG)
|
||||||
continue
|
continue
|
||||||
except InvalidShowException:
|
except InvalidShowException:
|
||||||
logger.log(u'No show name or scene exception matched the parsed filename ' + title, logger.DEBUG)
|
logger.log(u'No match for search criteria in the parsed filename ' + title, logger.DEBUG)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
show_obj = parse_result.show
|
show_obj = parse_result.show
|
||||||
|
@@ -372,28 +366,28 @@ class GenericProvider:
             if not (show_obj.air_by_date or show_obj.is_sports):
                 if 'sponly' == search_mode:
                     if len(parse_result.episode_numbers):
-                        logger.log(u'This is supposed to be a season pack search but the result ' + title
-                                   + u' is not a valid season pack, skipping it', logger.DEBUG)
+                        logger.log(u'This is supposed to be a season pack search but the result ' + title +
+                                   u' is not a valid season pack, skipping it', logger.DEBUG)
                         add_cache_entry = True
                     if len(parse_result.episode_numbers)\
-                            and (parse_result.season_number not in set([ep.season for ep in episodes])
-                                 or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
-                        logger.log(u'The result ' + title + u' doesn\'t seem to be a valid episode that we are trying'
-                                   + u' to snatch, ignoring', logger.DEBUG)
+                            and (parse_result.season_number not in set([ep.season for ep in episodes]) or not [
+                                 ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
+                        logger.log(u'The result ' + title + u' doesn\'t seem to be a valid episode that we are trying' +
+                                   u' to snatch, ignoring', logger.DEBUG)
                         add_cache_entry = True
                 else:
                     if not len(parse_result.episode_numbers)\
                             and parse_result.season_number\
                             and not [ep for ep in episodes
-                                     if ep.season == parse_result.season_number
-                                     and ep.episode in parse_result.episode_numbers]:
-                        logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying'
-                                   + u' to snatch, ignoring', logger.DEBUG)
+                                     if ep.season == parse_result.season_number and
+                                     ep.episode in parse_result.episode_numbers]:
+                        logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
+                                   u' to snatch, ignoring', logger.DEBUG)
                         add_cache_entry = True
                     elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
                                                                     ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
-                        logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying'
-                                   + u' to snatch, ignoring', logger.DEBUG)
+                        logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
+                                   u' to snatch, ignoring', logger.DEBUG)
                         add_cache_entry = True

             if not add_cache_entry:
@@ -402,8 +396,8 @@ class GenericProvider:
                     actual_episodes = parse_result.episode_numbers
                 else:
                     if not parse_result.is_air_by_date:
-                        logger.log(u'This is supposed to be a date search but the result ' + title
-                                   + u' didn\'t parse as one, skipping it', logger.DEBUG)
+                        logger.log(u'This is supposed to be a date search but the result ' + title +
+                                   u' didn\'t parse as one, skipping it', logger.DEBUG)
                         add_cache_entry = True
                     else:
                         airdate = parse_result.air_date.toordinal()
@@ -412,8 +406,8 @@ class GenericProvider:
                             [show_obj.indexerid, airdate])

                         if 1 != len(sql_results):
-                            logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t'
-                                       + u' give proper results, skipping it', logger.WARNING)
+                            logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' +
+                                       u' give proper results, skipping it', logger.WARNING)
                             add_cache_entry = True

             if not add_cache_entry:
@@ -462,8 +456,8 @@ class GenericProvider:
                 logger.log(u'Single episode result.', logger.DEBUG)
             elif 1 < len(ep_obj):
                 ep_num = MULTI_EP_RESULT
-                logger.log(u'Separating multi-episode result to check for later - result contains episodes: '
-                           + str(parse_result.episode_numbers), logger.DEBUG)
+                logger.log(u'Separating multi-episode result to check for later - result contains episodes: ' +
+                           str(parse_result.episode_numbers), logger.DEBUG)
             elif 0 == len(ep_obj):
                 ep_num = SEASON_RESULT
                 logger.log(u'Separating full season result to check for later', logger.DEBUG)
@@ -480,7 +474,7 @@ class GenericProvider:

         return results

-    def find_propers(self, search_date=None):
+    def find_propers(self, search_date=None, **kwargs):

         results = self.cache.listPropers(search_date)

@@ -494,16 +488,25 @@ class GenericProvider:
         """
         return ''

-    @staticmethod
-    def _log_result(mode='cache', count=0, url='url missing'):
+    def _log_search(self, mode='Cache', count=0, url='url missing'):
         """
-        Simple function to log the result of a search
+        Simple function to log the result of a search types except propers
         :param count: count of successfully processed items
         :param url: source url of item(s)
         """
-        mode = mode.lower()
-        logger.log(u'%s in response from %s' % (('No %s items' % mode,
-                                                 '%s %s item%s' % (count, mode, maybe_plural(count)))[0 < count], url))
+        if 'Propers' != mode:
+            self.log_result(mode, count, url)
+
+    def log_result(self, mode='Cache', count=0, url='url missing'):
+        """
+        Simple function to log the result of any search
+        :param count: count of successfully processed items
+        :param url: source url of item(s)
+        """
+        str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
+        logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
+            '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
+            re.sub('(\s)\s+', r'\1', url)))

     def check_auth_cookie(self):

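The new log_result above builds a single message for every search type, with special wording for propers and an optional "freeleech" prefix. A hedged trace of the strings it should produce, using a stand-in for the maybe_plural helper (its exact signature is assumed) and an illustrative URL:

# Hedged sketch reproducing the log_result() message format shown above; not the module itself.
import re

def log_message(mode='Cache', count=0, url='url missing', freeleech=False):
    maybe_plural = lambda n: ('', 's')[1 != n]   # assumed stand-in for sickbeard.helpers.maybe_plural
    str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
    return u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
        '%s%s%s%s' % (('', 'freeleech ')[freeleech], thing, maybe_plural(count), str3)),
        re.sub('(\s)\s+', r'\1', url))

print(log_message('Episode', 0, 'https://example.org/browse.php'))
# No episode items in response from https://example.org/browse.php
print(log_message('Propers', 2, 'https://example.org/browse.php'))
# 2 propers found in response from https://example.org/browse.php
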
@@ -529,8 +532,34 @@ class GenericProvider:

         return False, 'Cookies not correctly formatted key=value pairs e.g. uid=xx;pass=yy)'

+    def has_all_cookies(self, cookies=None, pre=''):
+
+        cookies = cookies or ['uid', 'pass']
+        return False not in ['%s%s' % (pre, item) in self.session.cookies for item in ([cookies], cookies)[isinstance(cookies, list)]]
+
+    def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'):
+
+        return delimiter.join([('%s', template)[any(template)] % c for c in sorted(self.categories['shows'] + (
+            [], [] if 'anime' not in self.categories else self.categories['anime'])[
+            ('Cache' == mode and helpers.has_anime()) or ((mode in ['Season', 'Episode']) and self.show and self.show.is_anime)])])
+
+    @staticmethod
+    def _bytesizer(size_dim=''):
+
+        try:
+            value = float('.'.join(re.findall('(?i)(\d+)(?:[\.,](\d+))?', size_dim)[0]))
+        except TypeError:
+            return size_dim
+        except IndexError:
+            return None
+        try:
+            value *= 1024 ** ['b', 'k', 'm', 'g', 't'].index(re.findall('(t|g|m|k)[i]?b', size_dim.lower())[0])
+        except IndexError:
+            pass
+        return int(math.ceil(value))
+

-class NZBProvider(GenericProvider):
+class NZBProvider(object, GenericProvider):

     def __init__(self, name, supports_backlog=True, anime_only=False):
         GenericProvider.__init__(self, name, supports_backlog, anime_only)
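_bytesizer above converts a scraped size cell such as "1.4 GB" into bytes so torrent results can carry a size field. A standalone copy of that logic (hedged sketch, for illustration only; sample inputs are made up):

# Hedged sketch: the _bytesizer logic lifted into a plain function to show its behaviour.
import math
import re

def bytesizer(size_dim=''):
    try:
        value = float('.'.join(re.findall('(?i)(\d+)(?:[\.,](\d+))?', size_dim)[0]))
    except TypeError:
        return size_dim          # non-string input is passed straight through
    except IndexError:
        return None              # no number found in the text
    try:
        value *= 1024 ** ['b', 'k', 'm', 'g', 't'].index(re.findall('(t|g|m|k)[i]?b', size_dim.lower())[0])
    except IndexError:
        pass                     # bare number, leave the value as-is
    return int(math.ceil(value))

print(bytesizer('1.4 GB'))   # 1503238554
print(bytesizer('703 MB'))   # 737148928
print(bytesizer(1234))       # 1234 (returned unchanged)
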
|
@ -543,7 +572,7 @@ class NZBProvider(GenericProvider):
|
||||||
|
|
||||||
def maybe_apikey(self):
|
def maybe_apikey(self):
|
||||||
|
|
||||||
if hasattr(self, 'needs_auth'):
|
if hasattr(self, 'needs_auth') and self.needs_auth:
|
||||||
if hasattr(self, 'key') and 0 < len(self.key):
|
if hasattr(self, 'key') and 0 < len(self.key):
|
||||||
return self.key
|
return self.key
|
||||||
if hasattr(self, 'api_key') and 0 < len(self.api_key):
|
if hasattr(self, 'api_key') and 0 < len(self.api_key):
|
||||||
|
@@ -562,7 +591,7 @@ class NZBProvider(GenericProvider):

         return GenericProvider._check_auth(self)

-    def _find_propers(self, search_date=None):
+    def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):

         cache_results = self.cache.listPropers(search_date)
         results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
@@ -573,11 +602,20 @@ class NZBProvider(GenericProvider):
         term_items_found = False
         do_search_alt = False

-        search_terms = ['.proper.', '.repack.']
-        proper_check = re.compile(r'(?i)\b(proper)|(repack)\b')
+        search_terms = []
+        regex = []
+        if shows:
+            search_terms += ['.proper.', '.repack.']
+            regex += ['proper|repack']
+            proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
+        if anime:
+            terms = 'v1|v2|v3|v4|v5'
+            search_terms += [terms]
+            regex += [terms]
+            proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))

         while index < len(search_terms):
-            search_params = {'q': search_terms[index]}
+            search_params = {'q': search_terms[index], 'maxage': 4}
             if alt_search:

                 if do_search_alt:
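With the hunk above, the NZB proper search assembles its terms and check regex from the shows/anime flags instead of a fixed list. A hedged check of the combined regex when both flags are set (titles are illustrative):

# Hedged sketch of the combined proper_check regex built above (illustration only).
import re

regex = []
regex += ['proper|repack']        # shows branch
regex += ['v1|v2|v3|v4|v5']       # anime branch
proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))

for title in ('Show.S01E02.PROPER.720p', 'Anime.Title.003v2.1080p', 'Show.S01E02.720p'):
    print(title, bool(proper_check.search(title)))
# Show.S01E02.PROPER.720p True
# Anime.Title.003v2.1080p True
# Show.S01E02.720p False
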
@@ -595,9 +633,9 @@ class NZBProvider(GenericProvider):
             else:
                 index += 1

-            for item in self._do_search(search_params, age=4):
+            for item in self._search_provider({'Propers': [search_params]}):

-                (title, url) = self._get_title_and_url(item)
+                (title, url) = self._title_and_url(item)

                 if not proper_check.search(title):
                     continue
@@ -620,8 +658,13 @@ class NZBProvider(GenericProvider):

         return results

+    def cache_data(self, *args, **kwargs):
+
+        search_params = {'Cache': [{}]}
+        return self._search_provider(search_params)
+

-class TorrentProvider(GenericProvider):
+class TorrentProvider(object, GenericProvider):

     def __init__(self, name, supports_backlog=True, anime_only=False):
         GenericProvider.__init__(self, name, supports_backlog, anime_only)
@@ -639,12 +682,21 @@ class TorrentProvider(GenericProvider):

         return self._seed_ratio

+    @staticmethod
+    def _sort_seeders(mode, items):
+
+        mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True)
+
+    def _peers_fail(self, mode, seeders=0, leechers=0):
+
+        return 'Cache' != mode and (seeders < getattr(self, 'minseed', 0) or leechers < getattr(self, 'minleech', 0))
+
     def get_quality(self, item, anime=False):

         if isinstance(item, tuple):
             name = item[0]
         elif isinstance(item, dict):
-            name, url = self._get_title_and_url(item)
+            name, url = self._title_and_url(item)
         else:
             name = item.title
         return Quality.sceneQuality(name, anime)
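The two helpers added above centralise the per-provider seeder sort and the minimum-peers filter that each provider previously re-implemented. A compact sketch of their intended effect on a parsed item list, using made-up result tuples:

# Hedged sketch of _sort_seeders/_peers_fail behaviour with illustrative data.
def peers_fail(mode, seeders=0, leechers=0, minseed=1, minleech=0):
    # 'Cache' mode is never filtered; other modes honour the configured minimums
    return 'Cache' != mode and (seeders < minseed or leechers < minleech)

def sort_seeders(mode, items):
    # only Season/Episode searches get re-ordered by the seeder count (tuple index 2)
    mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True)

items = {'Episode': [('Title.A', 'url-a', 3, 700), ('Title.B', 'url-b', 42, 700)]}
sort_seeders('Episode', items)
print([t[0] for t in items['Episode']])   # ['Title.B', 'Title.A']
print(peers_fail('Episode', seeders=0))   # True  -> row skipped
print(peers_fail('Cache', seeders=0))     # False -> cache rows always kept
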
@@ -664,51 +716,59 @@ class TorrentProvider(GenericProvider):
            Quality.FULLHDBLURAY: '1080p Bluray x264'
        }.get(quality, '')

-    def _get_season_search_strings(self, ep_obj, detail_only=False, scene=True):
-
-        if ep_obj.show.air_by_date or ep_obj.show.is_sports:
-            ep_detail = str(ep_obj.airdate).split('-')[0]
-        elif ep_obj.show.is_anime:
-            ep_detail = ep_obj.scene_absolute_number
-        else:
-            ep_detail = 'S%02d' % int((ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)])
-
-        detail = ({}, {'Season_only': [ep_detail]})[detail_only and not self.show.is_sports and not self.show.is_anime]
-        return [dict({'Season': self._build_search_strings(ep_detail, scene)}.items() + detail.items())]
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', detail_only=False, scene=True, sep_date=' ', use_or=True):
+    def _season_strings(self, ep_obj, detail_only=False, scene=True, prefix='', **kwargs):

         if not ep_obj:
             return []

-        if self.show.air_by_date or self.show.is_sports:
-            ep_detail = str(ep_obj.airdate).replace('-', sep_date)
-            if self.show.is_sports:
-                month = ep_obj.airdate.strftime('%b')
-                ep_detail = ([ep_detail] + [month], '%s|%s' % (ep_detail, month))[use_or]
-        elif self.show.is_anime:
-            ep_detail = ep_obj.scene_absolute_number
-        else:
-            season, episode = ((ep_obj.season, ep_obj.episode),
-                               (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
-            ep_dict = {'seasonnumber': season, 'episodenumber': episode}
-            ep_detail = sickbeard.config.naming_ep_type[2] % ep_dict
-        append = (add_string, '')[self.show.is_anime]
-        detail = ({}, {'Episode_only': [ep_detail]})[detail_only and not self.show.is_sports and not self.show.is_anime]
-        return [dict({'Episode': self._build_search_strings(ep_detail, scene, append)}.items() + detail.items())]
+        show = ep_obj.show
+        ep_dict = self._ep_dict(ep_obj)
+        sp_detail = (show.air_by_date or show.is_sports) and str(ep_obj.airdate).split('-')[0] or \
+            (show.is_anime and ep_obj.scene_absolute_number or
+             'S%(seasonnumber)02d' % ep_dict if 'sp_detail' not in kwargs.keys() else kwargs['sp_detail'](ep_dict))
+        sp_detail = ([sp_detail], sp_detail)[isinstance(sp_detail, list)]
+        detail = ({}, {'Season_only': sp_detail})[detail_only and not self.show.is_sports and not self.show.is_anime]
+        return [dict({'Season': self._build_search_strings(sp_detail, scene, prefix)}.items() + detail.items())]

-    def _build_search_strings(self, ep_detail, process_name=True, append=''):
+    def _episode_strings(self, ep_obj, detail_only=False, scene=True, prefix='', sep_date=' ', date_or=False, **kwargs):
+
+        if not ep_obj:
+            return []
+
+        show = ep_obj.show
+        if show.air_by_date or show.is_sports:
+            ep_detail = [str(ep_obj.airdate).replace('-', sep_date)]\
+                if 'date_detail' not in kwargs.keys() else kwargs['date_detail'](ep_obj.airdate)
+            if show.is_sports:
+                month = ep_obj.airdate.strftime('%b')
+                ep_detail = (ep_detail + [month], ['%s|%s' % (x, month) for x in ep_detail])[date_or]
+        elif show.is_anime:
+            ep_detail = ep_obj.scene_absolute_number \
+                if 'ep_detail_anime' not in kwargs.keys() else kwargs['ep_detail_anime'](ep_obj.scene_absolute_number)
+        else:
+            ep_dict = self._ep_dict(ep_obj)
+            ep_detail = sickbeard.config.naming_ep_type[2] % ep_dict \
+                if 'ep_detail' not in kwargs.keys() else kwargs['ep_detail'](ep_dict)
+        ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)]
+        detail = ({}, {'Episode_only': ep_detail})[detail_only and not show.is_sports and not show.is_anime]
+        return [dict({'Episode': self._build_search_strings(ep_detail, scene, prefix)}.items() + detail.items())]
+
+    @staticmethod
+    def _ep_dict(ep_obj):
+        season, episode = ((ep_obj.season, ep_obj.episode),
+                           (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
+        return {'seasonnumber': season, 'episodenumber': episode}
+
+    def _build_search_strings(self, ep_detail, process_name=True, prefix=''):
        """
        Build a list of search strings for querying a provider
        :param ep_detail: String of episode detail or List of episode details
        :param process_name: Bool Whether to call sanitizeSceneName() on show name
-        :param append: String to append to search strings
+        :param prefix: String to insert to search strings
        :return: List of search string parameters
        """
-        if not isinstance(ep_detail, list):
-            ep_detail = [ep_detail]
-        if not isinstance(append, list):
-            append = [append]
+        ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)]
+        prefix = ([prefix], prefix)[isinstance(prefix, list)]

        search_params = []
        crop = re.compile(r'([\.\s])(?:\1)+')
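The reworked _season_strings/_episode_strings above return a list of dicts keyed by search mode, and the kwargs hooks (sp_detail, ep_detail, date_detail, ep_detail_anime) let an individual provider reshape the detail text. A hedged sketch of the resulting structure for a regular non-anime, non-dated episode, assuming the default naming pattern 'S%(seasonnumber)02dE%(episodenumber)02d' and an illustrative show name:

# Hedged sketch of the search-parameter shape produced by _episode_strings (assumptions noted above).
ep_dict = {'seasonnumber': 3, 'episodenumber': 7}
naming_ep_type = 'S%(seasonnumber)02dE%(episodenumber)02d'   # assumed value of sickbeard.config.naming_ep_type[2]

ep_detail = naming_ep_type % ep_dict                 # 'S03E07'
search_params = [{'Episode': ['Show Name %s' % ep_detail]}]
print(search_params)
# [{'Episode': ['Show Name S03E07']}]
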
@@ -716,9 +776,71 @@ class TorrentProvider(GenericProvider):
            if process_name:
                name = helpers.sanitizeSceneName(name)
            for detail in ep_detail:
-                search_params += [crop.sub(r'\1', '%s %s' % (name, detail) + ('', ' ' + x)[any(x)]) for x in append]
+                search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix]
        return search_params

+    def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):
+
+        maxed_out = (lambda x: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', x))
+        logged_in, failed_msg = [None is not a and a or b for (a, b) in (
+            (logged_in, (lambda x=None: self.has_all_cookies())),
+            (failed_msg, (lambda x='': maxed_out(x) and u'Urgent abort, running low on login attempts. Password flushed to prevent service disruption to %s.' or
+                          (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', x) and
+                           u'Invalid username or password for %s. Check settings' or
+                           u'Failed to authenticate or parse a response from %s, abort provider')))
+        )]
+
+        if logged_in():
+            return True
+
+        if hasattr(self, 'digest'):
+            self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', self.digest)
+            success, msg = self._check_cookie()
+            if not success:
+                self.cookies = None
+                logger.log(u'%s: [%s]' % (msg, self.cookies), logger.WARNING)
+                return False
+        elif not self._check_auth():
+            return False
+
+        if isinstance(url, type([])):
+            for i in range(0, len(url)):
+                helpers.getURL(url.pop(), session=self.session)
+
+        if not url:
+            if hasattr(self, 'urls'):
+                url = self.urls.get('login_action')
+                if url:
+                    response = helpers.getURL(url, session=self.session)
+                    try:
+                        action = re.findall('[<]form[\w\W]+?action="([^"]+)', response)[0]
+                        url = self.urls['config_provider_home_uri'] + action.lstrip('/')
+                    except KeyError:
+                        return super(TorrentProvider, self)._authorised()
+                else:
+                    url = self.urls.get('login')
+            if not url:
+                return super(TorrentProvider, self)._authorised()
+
+        if hasattr(self, 'username') and hasattr(self, 'password'):
+            creds = dict(username=self.username, password=self.password)
+            if not post_params:
+                post_params = creds.copy()
+            elif self.password not in post_params.values() and isinstance(post_params, type({})):
+                post_params.update(creds)
+
+        response = helpers.getURL(url, post_data=post_params, session=self.session, timeout=timeout)
+        if response:
+            if logged_in(response):
+                return True
+
+            if maxed_out(response) and hasattr(self, 'password'):
+                self.password = None
+                sickbeard.save_config()
+            logger.log(failed_msg(response) % self.name, logger.ERROR)
+
+        return False
+
     def _check_auth(self):

         if hasattr(self, 'username') and hasattr(self, 'password'):
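With the generic _authorised above, an individual torrent provider normally only supplies a logged_in test and any extra POST fields, which is exactly what the FunFile and GFTracker hunks in this changeset do. A minimal hedged sketch of such an override (the class name is illustrative, not a provider in this changeset):

# Hedged sketch of a provider-side override built on the new generic _authorised.
class ExampleProvider(TorrentProvider):                     # illustrative subclass only

    def _authorised(self, **kwargs):
        # delegate login, cookie check and error reporting to the base implementation
        return super(ExampleProvider, self)._authorised(
            logged_in=(lambda x=None: self.has_all_cookies(['uid', 'pass'])),
            post_params={'login': 'Login'}, timeout=60)
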
@@ -742,67 +864,43 @@ class TorrentProvider(GenericProvider):

         raise AuthException('%s for %s is empty in config provider options' % (setting, self.name))

-    def _find_propers(self, search_date=datetime.datetime.today(), search_terms=None):
+    def find_propers(self, **kwargs):
         """
         Search for releases of type PROPER
-        :param search_date: Filter search on episodes since this date
-        :param search_terms: String or list of strings that qualify PROPER release types
         :return: list of Proper objects
         """
         results = []

-        my_db = db.DBConnection()
-        sql_results = my_db.select(
-            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
-            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
-            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
-            ' AND (e.status IN (%s)' % ','.join([str(x) for x in Quality.DOWNLOADED]) +
-            ' OR (e.status IN (%s)))' % ','.join([str(x) for x in Quality.SNATCHED])
-        )
-
-        if not sql_results:
-            return results
-
-        clean_term = re.compile(r'(?i)[^a-z\|\.]+')
-        for sqlshow in sql_results:
-            showid, season, episode = [int(sqlshow[item]) for item in ('showid', 'season', 'episode')]
-
-            self.show = helpers.findCertainShow(sickbeard.showList, showid)
-            if not self.show:
-                continue
-
-            cur_ep = self.show.getEpisode(season, episode)
-
-            if None is search_terms:
-                search_terms = ['proper', 'repack']
-            elif not isinstance(search_terms, list):
-                if '' == search_terms:
-                    search_terms = 'proper|repack'
-                search_terms = [search_terms]
-
-            for proper_term in search_terms:
-                proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
-
-                search_string = self._get_episode_search_strings(cur_ep, add_string=proper_term)
-                for item in self._do_search(search_string[0]):
-                    title, url = self._get_title_and_url(item)
-                    if not proper_check.search(title):
-                        continue
-                    results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
-
+        search_terms = getattr(self, 'proper_search_terms', ['proper', 'repack'])
+        if not isinstance(search_terms, list):
+            if None is search_terms:
+                search_terms = 'proper|repack'
+            search_terms = [search_terms]
+
+        items = self._search_provider({'Propers': search_terms})
+
+        clean_term = re.compile(r'(?i)[^a-z1-9\|\.]+')
+        for proper_term in search_terms:
+            proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
+            for item in items:
+                title, url = self._title_and_url(item)
+                if proper_check.search(title):
+                    results.append(classes.Proper(title, url, datetime.datetime.today(),
+                                                  helpers.findCertainShow(sickbeard.showList, None)))
         return results

     @staticmethod
     def _has_no_results(*html):
-        return re.search(r'(?i)<(?:h\d|strong)[^>]*>(?:'
-                         + 'your\ssearch\sdid\snot\smatch|'
-                         + 'nothing\sfound|'
-                         + 'no\storrents\sfound|'
-                         + '.*?there\sare\sno\sresults|'
-                         + '.*?no\shits\.\sTry\sadding'
-                         + ')', html[0])
+        return re.search(r'(?i)<(?:b|h\d|strong)[^>]*>(?:' +
+                         'your\ssearch\sdid\snot\smatch|' +
+                         'nothing\sfound|' +
+                         'no\storrents\sfound|' +
+                         '.*?there\sare\sno\sresults|' +
+                         '.*?no\shits\.\sTry\sadding' +
+                         ')', html[0])

-    def get_cache_data(self, *args, **kwargs):
+    def cache_data(self, *args, **kwargs):

         search_params = {'Cache': ['']}
-        return self._do_search(search_params)
+        return self._search_provider(search_params)

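The torrent find_propers above now takes its terms from proper_search_terms (set to 'v1|v2|v3|v4|v5' for anime-only providers in the GenericProvider hunk earlier) and filters result titles with a regex built per term. A short hedged trace of that term-to-regex filtering, using the default terms and a made-up title:

# Hedged sketch of the term -> regex filtering in the new torrent find_propers.
import re

search_terms = ['proper', 'repack']                 # the getattr default shown above
clean_term = re.compile(r'(?i)[^a-z1-9\|\.]+')
for proper_term in search_terms:
    proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
    print(proper_term, bool(proper_check.search('Show.S01E02.REPACK.720p')))
# proper False
# repack True
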
@@ -16,13 +16,13 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

 import re
-import datetime
 import time
 import traceback

 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode


@@ -33,53 +33,39 @@ class GFTrackerProvider(generic.TorrentProvider):

         self.url_base = 'https://thegft.org/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login_get': self.url_base + 'login.php',
-                     'login_post': self.url_base + 'loginsite.php',
-                     'cache': self.url_base + 'browse.php?view=0&c26=1&c37=1&c19=1&c47=1&c17=1&c4=1&searchtype=1',
+                     'login_init': self.url_base + 'login.php',
+                     'login': self.url_base + 'loginsite.php',
+                     'browse': self.url_base + 'browse.php?view=0&%s&searchtype=1%s',
                      'search': '&search=%s',
                      'get': self.url_base + '%s'}

+        self.categories = {'shows': [4, 17, 19, 26, 37, 47], 'anime': [16]}
+
         self.url = self.urls['config_provider_home_uri']

         self.username, self.password, self.minseed, self.minleech = 4 * [None]
         self.cache = GFTrackerCache(self)

-    def _do_login(self):
-
-        logged_in = lambda: 'gft_uid' in self.session.cookies and 'gft_pass' in self.session.cookies
-        if logged_in():
-            return True
-
-        if self._check_auth():
-            helpers.getURL(self.urls['login_get'], session=self.session)
-            login_params = {'username': self.username, 'password': self.password}
-            response = helpers.getURL(self.urls['login_post'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            logger.log(u'Failed to authenticate with %s, abort provider.' % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _authorised(self, **kwargs):
+
+        return super(GFTrackerProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='gft_')),
+                                                          url=[self.urls['login_init']])
+
+    def _search_provider(self, search_params, **kwargs):

         results = []
-        if not self._do_login():
+        if not self._authorised():
             return results

-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'details', 'get': 'download',
                                                              'seeders': r'(^\d+)', 'leechers': r'(\d+)$'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['cache']
-                if 'Cache' != mode:
-                    search_url += self.urls['search'] % search_string
-
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['browse'] % (self._categories_string(mode),
+                                                    (self.urls['search'] % search_string, '')['Cache' == mode])
                 html = self.get_url(search_url)

@@ -99,44 +85,41 @@ class GFTrackerProvider(generic.TorrentProvider):
                     for tr in torrent_rows[1:]:
                         try:
                             seeders, leechers = 2 * [tr.find_all('td')[-1].get_text().strip()]
-                            seeders = int(rc['seeders'].findall(seeders)[0])
-                            leechers = int(rc['leechers'].findall(leechers)[0])
-                            if mode != 'Cache' and (seeders < self.minseed or leechers < self.minleech):
+                            seeders, leechers = [tryInt(n) for n in [
+                                rc['seeders'].findall(seeders)[0], rc['leechers'].findall(leechers)[0]]]
+                            if self._peers_fail(mode, seeders, leechers):
                                 continue

                             info = tr.find('a', href=rc['info'])
                             title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                            size = tr.find_all('td')[-2].get_text().strip()

                             download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                        except (AttributeError, TypeError):
+                        except (AttributeError, TypeError, ValueError):
                             continue

                         if title and download_url:
-                            items[mode].append((title, download_url, seeders))
+                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)

-            'Cache' != mode and items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)

-            results += items[mode]
+            results = list(set(results + items[mode]))

         return results

-    def find_propers(self, search_date=datetime.datetime.today()):
+    def _season_strings(self, ep_obj, **kwargs):

-        return self._find_propers(search_date)
+        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False)

-    def _get_season_search_strings(self, ep_obj, **kwargs):
+    def _episode_strings(self, ep_obj, **kwargs):

-        return generic.TorrentProvider._get_season_search_strings(self, ep_obj, scene=False)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, scene=False, use_or=False)
+        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)


 class GFTrackerCache(tvcache.TVCache):

@@ -144,10 +127,11 @@ class GFTrackerCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.minTime = 17  # cache update frequency
+        self.update_freq = 17  # cache update frequency

-    def _getRSSData(self):
+    def _cache_data(self):

-        return self.provider.get_cache_data()
+        return self.provider.cache_data()


 provider = GFTrackerProvider()

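GFTracker's login check above now reuses the shared has_all_cookies with a pre prefix, so the default ['uid', 'pass'] names are looked up as gft_uid/gft_pass. A tiny hedged sketch of that prefixing, using a plain dict in place of the requests cookie jar:

# Hedged sketch: prefixing behaviour of has_all_cookies (illustrative only).
def has_all_cookies(jar, cookies=None, pre=''):
    cookies = cookies or ['uid', 'pass']
    return False not in ['%s%s' % (pre, item) in jar for item in ([cookies], cookies)[isinstance(cookies, list)]]

print(has_all_cookies({'gft_uid': '1', 'gft_pass': 'x'}, pre='gft_'))   # True
print(has_all_cookies({'uid': '1'}, pre='gft_'))                        # False
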
@@ -16,12 +16,12 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

 import re
-import datetime
 import traceback

 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode


@@ -33,54 +33,33 @@ class GrabTheInfoProvider(generic.TorrentProvider):
         self.url_base = 'http://grabthe.info/'
         self.urls = {'config_provider_home_uri': self.url_base,
                      'login': self.url_base + 'takelogin.php',
-                     'cache': self.url_base + 'browse.php?%s',
+                     'browse': self.url_base + 'browse.php?%s&incldead=%s&blah=0%s',
                      'search': '&search=%s',
                      'get': self.url_base + '%s'}

-        self.categories = 'c56=1&c8=1&c61=1&c10=1&incldead=0&blah=0'
+        self.categories = {'shows': [36, 32, 43, 56, 8, 10, 61]}

         self.url = self.urls['config_provider_home_uri']

         self.username, self.password, self.minseed, self.minleech = 4 * [None]
+        self.freeleech = False
         self.cache = GrabTheInfoCache(self)

-    def _do_login(self):
-
-        logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
-        if logged_in():
-            return True
-
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate with %s, abort provider'
-            if response and 'Username or password incorrect' in response:
-                msg = u'Invalid username or password for %s. Check settings'
-            logger.log(msg % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):

         results = []
-        if not self._do_login():
+        if not self._authorised():
            return results

-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['cache'] % self.categories
-                if 'cache' != mode.lower():
-                    search_url += self.urls['search'] % search_string
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['browse'] % (self._categories_string(), ('0', '3')[self.freeleech],
+                                                    (self.urls['search'] % search_string, '')['Cache' == mode])

                 html = self.get_url(search_url)

                 cnt = len(items[mode])
@@ -113,36 +92,32 @@ class GrabTheInfoProvider(generic.TorrentProvider):
                             if None is download_url:
                                 continue

-                            seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                            if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                            seeders, leechers, size = [tryInt(n, n) for n in [
+                                (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -3)]]
+                            if self._peers_fail(mode, seeders, leechers):
                                 continue
-                        except (AttributeError, TypeError, KeyError):
+                        except (AttributeError, TypeError, ValueError, KeyError):
                             continue

                         if title:
-                            items[mode].append((title, self.urls['get']
-                                                % str(download_url['href'].lstrip('/')), seeders))
+                            items[mode].append((title, self.urls['get'] % str(download_url['href'].lstrip('/')),
+                                                seeders, self._bytesizer(size)))

                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)

-            # for each search mode sort all the items by seeders
-            'Cache' != mode and items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)

-            results += items[mode]
+            results = list(set(results + items[mode]))

         return results

-    def find_propers(self, search_date=datetime.datetime.today()):
+    def _episode_strings(self, ep_obj, **kwargs):

-        return self._find_propers(search_date)
+        return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)

-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='|', use_or=False)
-

 class GrabTheInfoCache(tvcache.TVCache):

@@ -150,11 +125,11 @@ class GrabTheInfoCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency

-    def _getRSSData(self):
+    def _cache_data(self):

-        return self.provider.get_cache_data()
+        return self.provider.cache_data()


 provider = GrabTheInfoProvider()

@@ -16,12 +16,13 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import datetime
 import urllib
 
 from . import generic
-from sickbeard import classes, logger, tvcache
+from sickbeard import logger, tvcache
 from sickbeard.exceptions import AuthException
+from sickbeard.helpers import tryInt
+from sickbeard.indexers import indexer_config
 
 try:
     import json
@@ -40,11 +41,13 @@ class HDBitsProvider(generic.TorrentProvider):
                      'search': self.url_base + 'api/torrents',
                      'get': self.url_base + 'download.php?%s'}
 
-        self.categories = 2  # TV
+        self.categories = [3, 5, 2]
 
+        self.proper_search_terms = [' proper ', ' repack ']
         self.url = self.urls['config_provider_home_uri']
 
-        self.username, self.passkey = 2 * [None]
+        self.username, self.passkey, self.minseed, self.minleech = 4 * [None]
+        self.freeleech = False
         self.cache = HDBitsCache(self)
 
     def check_auth_from_data(self, parsed_json):
@@ -55,112 +58,105 @@ class HDBitsProvider(generic.TorrentProvider):
 
         return True
 
-    def _get_season_search_strings(self, ep_obj, **kwargs):
+    def _season_strings(self, ep_obj, **kwargs):
 
-        return [self._build_search_strings(show=ep_obj.show, season=ep_obj)]
+        params = super(HDBitsProvider, self)._season_strings(ep_obj)
 
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
+        show = ep_obj.show
+        if indexer_config.INDEXER_TVDB == show.indexer and show.indexerid:
+            params[0]['Season'].insert(0, dict(tvdb=dict(
+                id=show.indexerid,
+                season=(show.air_by_date or show.is_sports) and str(ep_obj.airdate)[:7] or
+                (show.is_anime and ('%d' % ep_obj.scene_absolute_number) or
+                 (ep_obj.season, ep_obj.scene_season)[bool(show.is_scene)]))))
 
-        return [self._build_search_strings(show=ep_obj.show, episode=ep_obj)]
+        return params
 
-    def _get_title_and_url(self, item):
+    def _episode_strings(self, ep_obj, **kwargs):
 
-        title = item['name']
-        if title:
-            title = u'' + title.replace(' ', '.')
+        params = super(HDBitsProvider, self)._episode_strings(ep_obj, sep_date='|')
 
-        url = self.urls['get'] % urllib.urlencode({'id': item['id'], 'passkey': self.passkey})
+        show = ep_obj.show
+        if indexer_config.INDEXER_TVDB == show.indexer and show.indexerid:
+            id_param = dict(
+                id=show.indexerid,
+                episode=show.air_by_date and str(ep_obj.airdate).replace('-', ' ') or
+                (show.is_sports and ep_obj.airdate.strftime('%b') or
+                 (show.is_anime and ('%i' % int(ep_obj.scene_absolute_number)) or
+                  (ep_obj.episode, ep_obj.scene_episode)[bool(show.is_scene)])))
+            if not(show.air_by_date and show.is_sports and show.is_anime):
+                id_param['season'] = (ep_obj.season, ep_obj.scene_season)[bool(show.is_scene)]
+            params[0]['Episode'].insert(0, dict(tvdb=id_param))
 
-        return title, url
+        return params
 
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         self._check_auth()
 
-        logger.log(u'Search url: %s search_params: %s' % (self.urls['search'], search_params), logger.DEBUG)
-
-        response_json = self.get_url(self.urls['search'], post_data=search_params, json=True)
-        if response_json and 'data' in response_json and self.check_auth_from_data(response_json):
-            return response_json['data']
-
-        logger.log(u'Resulting JSON from %s isn\'t correct, not parsing it' % self.name, logger.ERROR)
-        return []
-
-    def find_propers(self, search_date=None):
-
         results = []
+        api_data = {'username': self.username, 'passkey': self.passkey, 'category': self.categories}
 
-        search_terms = [' proper ', ' repack ']
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
-        for term in search_terms:
-            for item in self._do_search(self._build_search_strings(search_term=term)):
-                if item['utadded']:
+        for mode in search_params.keys():
+            for search_string in search_params[mode]:
+
+                post_data = api_data.copy()
+                if isinstance(search_string, dict):
+                    post_data.update(search_string)
+                    id_search = True
+                else:
+                    post_data['search'] = search_string
+                    id_search = False
+
+                post_data = json.dumps(post_data)
+                search_url = self.urls['search']
+
+                json_resp = self.get_url(search_url, post_data=post_data, json=True)
+                if not (json_resp and 'data' in json_resp and self.check_auth_from_data(json_resp)):
+                    logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR)
+                    return results
+
+                cnt = len(items[mode])
+                for item in json_resp['data']:
                     try:
-                        result_date = datetime.datetime.fromtimestamp(int(item['utadded']))
-                    except:
-                        result_date = None
-
-                    if result_date and (not search_date or result_date > search_date):
-                        title, url = self._get_title_and_url(item)
-                        if not re.search('(?i)(?:%s)' % term.strip(), title):
+                        seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seed', 'leech', 'size']]
+                        if self._peers_fail(mode, seeders, leechers)\
+                                or self.freeleech and re.search('(?i)no', item.get('freeleech', 'no')):
                             continue
-                        results.append(classes.Proper(title, url, result_date, self.show))
+                        title = item['name']
+                        download_url = self.urls['get'] % urllib.urlencode({'id': item['id'], 'passkey': self.passkey})
+
+                    except (AttributeError, TypeError, ValueError):
+                        continue
+
+                    if title and download_url:
+                        items[mode].append((title, download_url, item.get('seeders', 0), self._bytesizer(size)))
+
+                self._log_search(mode, len(items[mode]) - cnt,
+                                 ('search_string: ' + search_string, self.name)['Cache' == mode])
+
+            self._sort_seeders(mode, items)
+
+            if id_search and len(items[mode]):
+                return items[mode]
+
+            results = list(set(results + items[mode]))
 
         return results
 
-    def _build_search_strings(self, show=None, episode=None, season=None, search_term=None):
-
-        request_params = {'username': self.username, 'passkey': self.passkey, 'category': [self.categories]}
-
-        if episode or season:
-            param = {'id': show.indexerid}
-
-            if episode:
-                if show.air_by_date:
-                    param['episode'] = str(episode.airdate).replace('-', '|')
-                elif show.is_sports:
-                    param['episode'] = episode.airdate.strftime('%b')
-                elif show.is_anime:
-                    param['episode'] = '%i' % int(episode.scene_absolute_number)
-                else:
-                    param['season'] = episode.scene_season
-                    param['episode'] = episode.scene_episode
-
-            if season:
-                if show.air_by_date or show.is_sports:
-                    param['season'] = str(season.airdate)[:7]
-                elif show.is_anime:
-                    param['season'] = '%d' % season.scene_absolute_number
-                else:
-                    param['season'] = season.scene_season
-
-            request_params['tvdb'] = param
-
-        if search_term:
-            request_params['search'] = search_term
-
-        return json.dumps(request_params)
-
-    def get_cache_data(self):
-
-        self._check_auth()
-
-        response_json = self.get_url(self.urls['search'], post_data=self._build_search_strings(), json=True)
-        if response_json and 'data' in response_json and self.check_auth_from_data(response_json):
-            return response_json['data']
-
-        return []
-
 
 class HDBitsCache(tvcache.TVCache):
 
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 15  # cache update frequency
+        self.update_freq = 15  # cache update frequency
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = HDBitsProvider()
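The reworked HDBits search posts a JSON body instead of a pre-built query string: an id-based search passes a ready-made dict (for example the `tvdb` block built in `_episode_strings`), anything else becomes a plain `search` term. A small sketch of that shape, with fabricated credentials and values, purely for illustration:

```python
# Hypothetical illustration of the request body the reworked HDBits search builds;
# field names come from the diff above, the values are made up.
import json

api_data = {'username': 'user', 'passkey': 'abc123', 'category': [3, 5, 2]}

def build_post(search_string, api_data):
    post_data = api_data.copy()
    if isinstance(search_string, dict):      # id-based search, e.g. {'tvdb': {'id': 1234, 'season': 5}}
        post_data.update(search_string)
    else:                                    # plain text search
        post_data['search'] = search_string
    return json.dumps(post_data)

print(build_post({'tvdb': {'id': 1234, 'season': 5}}, api_data))
print(build_post('Show Name S05E01', api_data))
```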
sickbeard/providers/hdspace.py (new file, 139 lines)
@@ -0,0 +1,139 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback

from . import generic
from sickbeard import logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode


class HDSpaceProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'HDSpace')

        self.url_base = 'https://hd-space.org/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'index.php?page=login',
                     'browse': self.url_base + 'index.php?page=torrents&' + '&'.join(['options=0', 'active=1', 'category=']),
                     'search': '&search=%s',
                     'get': self.url_base + '%s'}

        self.categories = {'shows': [21, 22, 24, 25, 27, 28]}

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.minseed, self.minleech = 4 * [None]
        self.freeleech = False
        self.cache = HDSpaceCache(self)

    def _authorised(self, **kwargs):

        return super(HDSpaceProvider, self)._authorised(post_params={'uid': self.username, 'pwd': self.password})

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'torrent-details', 'get': 'download', 'fl': 'free',
                                                             'peers': 'page=peers', 'nodots': '[\.\s]+'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['browse'] + self._categories_string(template='', delimiter=';')
                if 'Cache' != mode:
                    search_url += self.urls['search'] % rc['nodots'].sub(' ', search_string)

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive'], attr='width="100%"\Wclass="lista"') as soup:
                        torrent_table = soup.find_all('table', attrs={'class': 'lista'})[-1]
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[1:]:
                            if tr.find('td', class_='header'):
                                continue
                            downlink = tr.find('a', href=rc['get'])
                            if None is downlink:
                                continue
                            try:
                                seeders, leechers = [int(x.get_text().strip()) for x in tr.find_all('a', href=rc['peers'])]
                                if self._peers_fail(mode, seeders, leechers)\
                                        or self.freeleech and None is tr.find('img', title=rc['fl']):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
                                size = tr.find_all('td')[-5].get_text().strip()

                                download_url = self.urls['get'] % str(downlink['href']).lstrip('/')
                            except (AttributeError, TypeError, ValueError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except Exception:
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))

        return results

    def _season_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False)

    def _episode_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)


class HDSpaceCache(tvcache.TVCache):

    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)

        self.update_freq = 17  # cache update frequency

    def _cache_data(self):

        return self.provider.cache_data()


provider = HDSpaceProvider()
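The new HDSpace provider assembles its browse URL from a fixed query plus a category string appended by `self._categories_string(template='', delimiter=';')`. The exact output of that shared helper is not shown in this PR, so the sketch below only approximates the URL shape under the assumption that it joins the configured category ids with semicolons:

```python
# Sketch of the URL shape the new HDSpace provider appears to build; the output of
# _categories_string() is an assumption here (semicolon-joined ids after 'category=').
url_base = 'https://hd-space.org/'
browse = url_base + 'index.php?page=torrents&' + '&'.join(['options=0', 'active=1', 'category='])
categories = [21, 22, 24, 25, 27, 28]

search_url = browse + ';'.join(str(c) for c in categories)   # assumed category formatting
search_url += '&search=%s' % 'Show Name S05E01'
print(search_url)
```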
@@ -16,11 +16,10 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import datetime
 import traceback
 
 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
 from lib.unidecode import unidecode
 
@@ -33,49 +32,38 @@ class IPTorrentsProvider(generic.TorrentProvider):
         self.url_base = 'https://iptorrents.eu/'
         self.urls = {'config_provider_home_uri': self.url_base,
                      'login': self.url_base + 'torrents/',
-                     'search': self.url_base + 't?l73=1&l78=1&l66=1&l65=1&l79=1&l5=1&l4=1&qf=ti%s&q=%s#torrents',
+                     'search': self.url_base + 't?%s;q=%s;qf=ti%s%s#torrents',
                      'get': self.url_base + '%s'}
 
+        self.categories = {'shows': [4, 5, 22, 23, 24, 25, 26, 55, 65, 66, 73, 78, 79], 'anime': [60]}
+
+        self.proper_search_terms = None
         self.url = self.urls['config_provider_home_uri']
 
         self.username, self.password, self.minseed, self.minleech = 4 * [None]
         self.freeleech = False
         self.cache = IPTorrentsCache(self)
 
-    def _do_login(self):
+    def _authorised(self, **kwargs):
 
-        logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
-        if logged_in():
-            return True
+        return super(IPTorrentsProvider, self)._authorised(post_params={'php': ''})
 
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password, 'login': 'submit'}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            logger.log(u'Failed to authenticate with %s, abort provider' % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         results = []
-        if not self._do_login():
+        if not self._authorised():
             return results
 
-        items = {'Season': [], 'Episode': [], 'Cache': []}
-        freeleech = self.freeleech and '&free=on' or ''
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                 # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-                search_url = '%s%s' % (self.urls['search'] % (freeleech, search_string),
-                                       (';o=seeders', '')['Cache' == mode])
+                search_url = self.urls['search'] % (self._categories_string(mode, '%s', ';'), search_string,
+                                                    ('', ';free')[self.freeleech], (';o=seeders', '')['Cache' == mode])
                 html = self.get_url(search_url)
 
                 cnt = len(items[mode])
@@ -94,45 +82,41 @@ class IPTorrentsProvider(generic.TorrentProvider):
                         try:
                             seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
                                                  for x in ('t_seeders', 't_leechers')]
-                            if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                            if self._peers_fail(mode, seeders, leechers):
                                 continue
 
                             info = tr.find('a', href=rc['info'])
                             title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                            size = tr.find_all('td')[-4].get_text().strip()
 
                             download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                        except (AttributeError, TypeError):
+                        except (AttributeError, TypeError, ValueError):
                             continue
 
                         if title and download_url:
-                            items[mode].append((title, download_url, seeders))
+                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)
 
-            results += items[mode]
+            results = list(set(results + items[mode]))
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date, '')
-
 
 class IPTorrentsCache(tvcache.TVCache):
 
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = IPTorrentsProvider()
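IPTorrents' hard-coded category flags move out of the URL and into `self.categories`, with `_categories_string()` expanding them back into the first placeholder of the new `'t?%s;q=%s;qf=ti%s%s#torrents'` template. A hedged expansion, with the category fragment hand-built to mimic the old `lNN=1` flags (an assumption about the helper's output):

```python
# Assumed expansion of the new IPTorrents search template; the first %s is the category
# string, reconstructed here by hand for illustration only.
search_tmpl = 'https://iptorrents.eu/' + 't?%s;q=%s;qf=ti%s%s#torrents'

cats = ';'.join('l%s=1' % c for c in [4, 5, 22, 23])   # guessed shape of _categories_string()
freeleech = ';free'                                     # '' when the freeleech option is off
sort = ';o=seeders'                                     # omitted for 'Cache' mode
print(search_tmpl % (cats, 'Show Name S05E01', freeleech, sort))
```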
@@ -17,15 +17,15 @@
 
 from __future__ import with_statement
 
-import re
 import os
-import datetime
+import re
 import traceback
 import urllib
 
 from . import generic
-from sickbeard import config, logger, tvcache, show_name_helpers, helpers
+from sickbeard import config, logger, show_name_helpers, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import (has_anime, tryInt)
 from sickbeard.common import Quality, mediaExtensions
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from lib.unidecode import unidecode
@@ -38,10 +38,11 @@ class KATProvider(generic.TorrentProvider):
 
         self.url_base = 'https://kat.ph/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'search': [self.url_base, 'http://katproxy.com/'],
-                     'cache_params': 'tv/?field=time_add&sorder=desc',
-                     'search_params': 'usearch/%s/?field=seeders&sorder=desc'}
+                     'base': [self.url_base, 'http://katproxy.com/'],
+                     'search': 'usearch/%s/',
+                     'sorted': '?field=time_add&sorder=desc'}
 
+        self.proper_search_terms = None
         self.url = self.urls['config_provider_home_uri']
 
         self.minseed, self.minleech = 2 * [None]
@@ -108,7 +109,7 @@ class KATProvider(generic.TorrentProvider):
         except Exception:
             logger.log(u'Failed to quality parse ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
 
-    def _get_season_search_strings(self, ep_obj, **kwargs):
+    def _season_strings(self, ep_obj, **kwargs):
 
         if ep_obj.show.air_by_date or ep_obj.show.is_sports:
             airdate = str(ep_obj.airdate).split('-')[0]
@@ -119,54 +120,44 @@ class KATProvider(generic.TorrentProvider):
             season = (ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)]
             ep_detail = ['S%(s)02i -S%(s)02iE' % {'s': season}, 'Season %s -Ep*' % season]
 
-        return [{'Season': self._build_search_strings(ep_detail, append=(' category:tv', '')[self.show.is_anime])}]
+        return [{'Season': self._build_search_strings(ep_detail)}]
 
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
+    def _episode_strings(self, ep_obj, **kwargs):
 
-        if not ep_obj:
-            return []
+        return generic.TorrentProvider._episode_strings(self, ep_obj, date_or=True,
+                                                        ep_detail=lambda x: '%s|%s' % (config.naming_ep_type[2] % x,
+                                                                                       config.naming_ep_type[0] % x),
+                                                        ep_detail_anime=lambda x: '%02i' % x, **kwargs)
 
-        if self.show.air_by_date or self.show.is_sports:
-            ep_detail = str(ep_obj.airdate).replace('-', ' ')
-            if self.show.is_sports:
-                ep_detail += '|' + ep_obj.airdate.strftime('%b')
-        elif self.show.is_anime:
-            ep_detail = '%02i' % ep_obj.scene_absolute_number
-        else:
-            season, episode = ((ep_obj.season, ep_obj.episode),
-                               (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
-            ep_dict = {'seasonnumber': season, 'episodenumber': episode}
-            ep_detail = '%s|%s' % (config.naming_ep_type[2] % ep_dict, config.naming_ep_type[0] % ep_dict)
-        # include provider specific appends
-        if not isinstance(add_string, list):
-            add_string = [add_string]
-        add_string = [x + ' category:tv' for x in add_string]
-
-        return [{'Episode': self._build_search_strings(ep_detail, append=(add_string, '')[self.show.is_anime])}]
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, search_mode='eponly', epcount=0, **kwargs):
 
         results = []
-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
-        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'link': 'normal'}.items())
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'link': 'normal', 'get': '^magnet', 'verif': 'verif'}.items())
         url = 0
         for mode in search_params.keys():
-            for search_string in search_params[mode]:
+            search_show = mode in ['Season', 'Episode']
+            if not search_show and has_anime():
+                search_params[mode] *= (1, 2)['Cache' == mode]
+            'Propers' == mode and search_params[mode].append('v1|v2|v3|v4|v5')
 
-                self.url = self.urls['search'][url]
-                search_args = ('search_params', 'cache_params')['Cache' == mode]
-                search_url = self.url + self.urls[search_args]
-                if 'Cache' != mode:
-                    search_url %= urllib.quote(unidecode(search_string))
+            for enum, search_string in enumerate(search_params[mode]):
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
 
-                html = helpers.getURL(search_url)
+                self.url = self.urls['base'][url]
+                search_url = self.url + (self.urls['search'] % urllib.quote('%scategory:%s' % (
+                    ('', '%s ' % search_string)['Cache' != mode],
+                    ('tv', 'anime')[(search_show and bool(self.show and self.show.is_anime)) or bool(enum)])))
 
+                self.session.headers.update({'Referer': search_url})
+                html = self.get_url(search_url + self.urls['sorted'])
 
                 cnt = len(items[mode])
                 try:
-                    if not html or self._has_no_results(html) or re.search(r'did not match any documents', html):
+                    if not html or 'kastatic' not in html or self._has_no_results(html) or re.search(r'(?is)<(?:h\d)[^>]*>.*?(?:did\snot\smatch)', html):
                         if html and 'kastatic' not in html:
-                            url += (1, 0)[url == len(self.urls['search'])]
+                            url += (1, 0)[url == len(self.urls['base'])]
                         raise generic.HaltParseException
 
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
@@ -178,8 +169,9 @@ class KATProvider(generic.TorrentProvider):
 
                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                                if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                                seeders, leechers, size = [tryInt(n, n) for n in [
+                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -5)]]
+                                if self._peers_fail(mode, seeders, leechers):
                                     continue
 
                                 info = tr.find('div', {'class': 'torrentname'})
@@ -187,11 +179,11 @@ class KATProvider(generic.TorrentProvider):
                                     .strip()
                                 link = self.url + info.find('a', {'class': rc['link']})['href'].lstrip('/')
 
-                                download_magnet = tr.find('a', 'imagnet')['href']
-                            except (AttributeError, TypeError):
+                                download_magnet = tr.find('a', href=rc['get'])['href']
+                            except (AttributeError, TypeError, ValueError):
                                 continue
 
-                            if self.confirmed and not tr.find('a', 'iverify'):
+                            if self.confirmed and not (tr.find('a', title=rc['verif']) or tr.find('i', title=rc['verif'])):
                                 logger.log(u'Skipping untrusted non-verified result: %s' % title, logger.DEBUG)
                                 continue
 
@@ -201,36 +193,31 @@ class KATProvider(generic.TorrentProvider):
                                 title = self._find_season_quality(title, link, ep_number)
 
                             if title and download_magnet:
-                                items[mode].append((title, download_magnet, seeders))
+                                items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))
 
                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)
 
-            results += items[mode]
+            results = list(set(results + items[mode]))
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date, '')
-
 
 class KATCache(tvcache.TVCache):
 
     def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = KATProvider()
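KAT's URL pieces are now split into `base`, `search` and `sorted`, with the `category:tv` / `category:anime` qualifier folded into the quoted search term instead of being appended to the search strings. A rough, illustrative composition of the final URL (the show name and choices below are fabricated, and this assumes the same Python 2 environment as the codebase):

```python
# Rough sketch of the KAT search URL assembled from 'base' + 'search' + 'sorted'.
import urllib

base = 'https://kat.ph/'
search = 'usearch/%s/'
sorted_suffix = '?field=time_add&sorder=desc'

query = '%scategory:%s' % ('Show Name S05E01 ', 'tv')   # 'anime' for anime shows, prefix dropped in Cache mode
search_url = base + (search % urllib.quote(query)) + sorted_suffix
print(search_url)
```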
@@ -18,12 +18,12 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import datetime
 import traceback
 
 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode
 
 
@@ -35,8 +35,8 @@ class MoreThanProvider(generic.TorrentProvider):
         self.url_base = 'https://www.morethan.tv/'
         self.urls = {'config_provider_home_uri': self.url_base,
                      'login': self.url_base + 'login.php',
-                     'search': self.url_base + 'torrents.php?searchstr=%s&tags_type=1&order_by=time&order_way=desc'
-                               + '&filter_cat[2]=1&action=basic&searchsubmit=1',
+                     'search': self.url_base + 'torrents.php?searchstr=%s&' + '&'.join([
+                         'tags_type=1', 'order_by=time', '&order_way=desc', 'filter_cat[2]=1', 'action=basic', 'searchsubmit=1']),
                      'get': self.url_base + '%s'}
 
         self.url = self.urls['config_provider_home_uri']
@@ -44,41 +44,24 @@ class MoreThanProvider(generic.TorrentProvider):
         self.username, self.password, self.minseed, self.minleech = 4 * [None]
         self.cache = MoreThanCache(self)
 
-    def _do_login(self):
+    def _authorised(self, **kwargs):
 
-        logged_in = lambda: 'session' in self.session.cookies
-        if logged_in():
-            return True
+        return super(MoreThanProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
+                                                         post_params={'keeplogged': '1', 'login': 'Log in'})
 
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password, 'login': 'submit'}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate with %s, abort provider'
-            if response and 'username or password was incorrect' in response:
-                msg = u'Invalid username or password for %s. Check settings'
-            logger.log(msg % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         results = []
-        if not self._do_login():
+        if not self._authorised():
             return results
 
-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
         rc = dict((k, re.compile('(?i)' + v))
                   for (k, v) in {'info': 'view', 'get': 'download', 'name': 'showname', 'nuked': 'nuked'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
 
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
                 search_url = self.urls['search'] % search_string
 
                 # fetches 15 results by default, and up to 100 if allowed in user profile
@@ -102,8 +85,9 @@ class MoreThanProvider(generic.TorrentProvider):
                             continue
 
                         try:
-                            seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                            if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                            seeders, leechers, size = [tryInt(n, n) for n in [
+                                tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                            if self._peers_fail(mode, seeders, leechers):
                                 continue
 
                             title = tr.find('a', title=rc['info']).get_text().strip()
@@ -113,44 +97,36 @@ class MoreThanProvider(generic.TorrentProvider):
 
                             link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
                             download_url = self.urls['get'] + link
-                        except (AttributeError, TypeError):
+                        except (AttributeError, TypeError, ValueError):
                             continue
 
                         if title and download_url:
-                            items[mode].append((title, download_url, seeders))
+                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            # for each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)
 
-            results += items[mode]
+            results = list(set(results + items[mode]))
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)
-
 
 class MoreThanCache(tvcache.TVCache):
 
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = MoreThanProvider()
@@ -16,13 +16,13 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
-import urllib
 import time
+import urllib
 
 import sickbeard
-import generic
 
-from sickbeard import helpers, scene_exceptions, logger, tvcache
+from . import generic
+from sickbeard import helpers, logger, scene_exceptions, tvcache
 from sickbeard.exceptions import AuthException
 
 
@@ -71,7 +71,7 @@ class NewznabProvider(generic.NZBProvider):
         """
         Uses the newznab provider url and apikey to get the capabilities.
         Makes use of the default newznab caps param. e.a. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk
        Returns a tuple with (succes or not, array with dicts [{"id": "5070", "name": "Anime"},
         {"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}...etc}], error message)
         """
         return_categories = []
@@ -110,222 +110,233 @@ class NewznabProvider(generic.NZBProvider):
                % (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled,
                   self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog)
 
-    def _get_season_search_strings(self, ep_obj):
+    def _season_strings(self, ep_obj):
 
-        to_return = []
-        cur_params = {}
+        search_params = []
+        base_params = {}
 
         # season
         if ep_obj.show.air_by_date or ep_obj.show.is_sports:
             date_str = str(ep_obj.airdate).split('-')[0]
-            cur_params['season'] = date_str
-            cur_params['q'] = date_str.replace('-', '.')
+            base_params['season'] = date_str
+            base_params['q'] = date_str.replace('-', '.')
         elif ep_obj.show.is_anime:
-            cur_params['season'] = '%d' % ep_obj.scene_absolute_number
+            base_params['season'] = '%d' % ep_obj.scene_absolute_number
         else:
-            cur_params['season'] = str((ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)])
+            base_params['season'] = str((ep_obj.season, ep_obj.scene_season)[bool(ep_obj.show.is_scene)])
 
         # search
-        rid = helpers.mapIndexersToShow(ep_obj.show)[2]
-        if rid:
-            cur_return = cur_params.copy()
-            cur_return['rid'] = rid
-            to_return.append(cur_return)
+        ids = helpers.mapIndexersToShow(ep_obj.show)
+        if ids[1]:  # or ids[2]:
+            params = base_params.copy()
+            use_id = False
+            if ids[1] and self.supports_tvdbid():
+                params['tvdbid'] = ids[1]
+                use_id = True
+            if ids[2]:
+                params['rid'] = ids[2]
+                use_id = True
+            use_id and search_params.append(params)
 
         # add new query strings for exceptions
         name_exceptions = list(
             set([helpers.sanitizeSceneName(a) for a in
                  scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
         for cur_exception in name_exceptions:
-            cur_return = cur_params.copy()
-            if 'q' in cur_return:
-                cur_return['q'] = cur_exception + '.' + cur_return['q']
-            to_return.append(cur_return)
+            params = base_params.copy()
+            if 'q' in params:
+                params['q'] = '%s.%s' % (cur_exception, params['q'])
+            search_params.append(params)
 
-        return to_return
+        return [{'Season': search_params}]
 
-    def _get_episode_search_strings(self, ep_obj):
-        to_return = []
-        params = {}
+    def _episode_strings(self, ep_obj):
+
+        search_params = []
+        base_params = {}
 
         if not ep_obj:
-            return [params]
+            return [base_params]
 
         if ep_obj.show.air_by_date or ep_obj.show.is_sports:
             date_str = str(ep_obj.airdate)
-            params['season'] = date_str.partition('-')[0]
-            params['ep'] = date_str.partition('-')[2].replace('-', '/')
+            base_params['season'] = date_str.partition('-')[0]
+            base_params['ep'] = date_str.partition('-')[2].replace('-', '/')
         elif ep_obj.show.is_anime:
-            params['ep'] = '%i' % int(
+            base_params['ep'] = '%i' % int(
                 ep_obj.scene_absolute_number if int(ep_obj.scene_absolute_number) > 0 else ep_obj.scene_episode)
         else:
-            params['season'], params['ep'] = ((ep_obj.season, ep_obj.episode),
-                                              (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
+            base_params['season'], base_params['ep'] = (
+                (ep_obj.season, ep_obj.episode), (ep_obj.scene_season, ep_obj.scene_episode))[ep_obj.show.is_scene]
 
         # search
-        rid = helpers.mapIndexersToShow(ep_obj.show)[2]
-        if rid:
-            cur_return = params.copy()
-            cur_return['rid'] = rid
-            to_return.append(cur_return)
+        ids = helpers.mapIndexersToShow(ep_obj.show)
+        if ids[1]:  # or ids[2]:
+            params = base_params.copy()
+            use_id = False
+            if ids[1]:
+                if self.supports_tvdbid():
+                    params['tvdbid'] = ids[1]
+                use_id = True
+            if ids[2]:
+                params['rid'] = ids[2]
+                use_id = True
+            use_id and search_params.append(params)
 
         # add new query strings for exceptions
         name_exceptions = list(
             set([helpers.sanitizeSceneName(a) for a in
                  scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
 
         for cur_exception in name_exceptions:
-            cur_return = params.copy()
-            cur_return['q'] = cur_exception
-            to_return.append(cur_return)
+            params = base_params.copy()
+            params['q'] = cur_exception
+            search_params.append(params)
 
             if ep_obj.show.is_anime:
-                # Experimental, add a searchstring without search explicitly for the episode!
-                # Remove the ?ep=e46 paramater and use add the episode number to the query paramater.
-                # Can be usefull for newznab indexers that do not have the episodes 100% parsed.
-                # Start with only applying the searchstring to anime shows
-                params['q'] = cur_exception
-                params_no_ep = params.copy()
-
-                params_no_ep['q'] = '%s.%02d' % (params_no_ep['q'], int(params_no_ep['ep']))
-                if 'ep' in params_no_ep:
-                    params_no_ep.pop('ep')
-                to_return.append(params_no_ep)
-
-        return to_return
+                # Experimental, add a search string without search explicitly for the episode!
+                # Remove the ?ep=e46 parameter and use the episode number to the query parameter.
+                # Can be useful for newznab indexers that do not have the episodes 100% parsed.
+                # Start with only applying the search string to anime shows
+                params = base_params.copy()
+                params['q'] = '%s.%02d' % (cur_exception, int(params['ep']))
+                if 'ep' in params:
+                    params.pop('ep')
+                search_params.append(params)
+
+        return [{'Episode': search_params}]
+
+    def supports_tvdbid(self):
+
+        return self.get_id() not in ['sick_beard_index']
 
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         api_key = self._check_auth()
 
-        if 'rid' not in search_params and 'q' not in search_params:
-            logger.log('Error no rid or search term given.')
-            return []
-
-        params = {'t': 'tvsearch',
-                  'maxage': sickbeard.USENET_RETENTION,
-                  'limit': 100,
-                  'attrs': 'rageid',
-                  'offset': 0}
-
-        # category ids
-        cat = []
-        if self.show:
-            if self.show.is_sports:
-                cat = ['5060']
-            elif self.show.is_anime:
-                cat = ['5070']
-        params['cat'] = ','.join([self.cat_ids] + cat)
-
-        # if max_age is set, use it, don't allow it to be missing
-        if not params['maxage'] or age:
-            params['maxage'] = age
-
-        if search_params:
-            params.update(search_params)
+        base_params = {'t': 'tvsearch',
+                       'maxage': sickbeard.USENET_RETENTION or 0,
+                       'limit': 100,
+                       'attrs': 'rageid',
+                       'offset': 0}
 
         if isinstance(api_key, basestring):
-            params['apikey'] = api_key
+            base_params['apikey'] = api_key
 
         results = []
-        offset = total = 0
+        total = 0
 
-        # hardcoded to stop after a max of 4 hits (400 items) per query
-        while (offset <= total) and (offset < 400):
-            search_url = '%sapi?%s' % (self.url, urllib.urlencode(params))
-            logger.log(u'Search url: ' + search_url, logger.DEBUG)
-
-            data = self.cache.getRSSFeed(search_url)
-            time.sleep(1.1)
-            if not data or not self.check_auth_from_data(data):
-                break
-
-            for item in data.entries:
-
-                title, url = self._get_title_and_url(item)
-                if title and url:
-                    results.append(item)
-                else:
-                    logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
-                               logger.DEBUG)
-
-            # get total and offset attribs
-            try:
-                if 0 == total:
-                    total = int(data.feed.newznab_response['total'] or 0)
-                    hits = (total / 100 + int(0 < (total % 100)))
-                    hits += int(0 == hits)
-                offset = int(data.feed.newznab_response['offset'] or 0)
-            except AttributeError:
-                break
-
-            # No items found, prevent from doing another search
-            if 0 == total:
-                break
-
-            if offset != params['offset']:
-                logger.log('Tell your newznab provider to fix their bloody newznab responses')
-                break
-
-            params['offset'] += params['limit']
-            if total <= params['offset']:
-                logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
-                           logger.DEBUG)
-                break
-
-            # there are more items available than the amount given in one call, grab some more
-            items = total - params['offset']
-            logger.log('%s more item%s to fetch from a batch of up to %s items.'
-                       % (items, helpers.maybe_plural(items), params['limit']), logger.DEBUG)
+        for mode in search_params.keys():
+            for i, params in enumerate(search_params[mode]):
+
+                # category ids
+                cat = []
+                cat_anime = ('5070', '6070')['nzbs_org' == self.get_id()]
+                cat_sport = '5060'
+                if 'Episode' == mode or 'Season' == mode:
+                    if not ('rid' in params or 'tvdbid' in params or 'q' in params or not self.supports_tvdbid()):
+                        logger.log('Error no rid, tvdbid, or search term available for search.')
+                        continue
+
+                    if self.show:
+                        if self.show.is_sports:
+                            cat = [cat_sport]
+                        elif self.show.is_anime:
+                            cat = [cat_anime]
+                else:
+                    cat = [cat_sport, cat_anime]
+
+                if self.cat_ids or len(cat):
+                    base_params['cat'] = ','.join(sorted(set(self.cat_ids.split(',') + cat)))
+
+                request_params = base_params.copy()
+                request_params.update(params)
+
+                offset = 0
+                batch_count = not 0
+
+                # hardcoded to stop after a max of 4 hits (400 items) per query
+                while (offset <= total) and (offset < (200, 400)[self.supports_tvdbid()]) and batch_count:
+                    cnt = len(results)
+                    search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
+
+                    data = self.cache.getRSSFeed(search_url)
+                    i and time.sleep(1.1)
+
+                    if not data or not self.check_auth_from_data(data):
+                        break
+
+                    for item in data.entries:
+
+                        title, url = self._title_and_url(item)
+                        if title and url:
+                            results.append(item)
+                        else:
+                            logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
+                                       logger.DEBUG)
+
+                    # get total and offset attribs
+                    try:
+                        if 0 == total:
+                            total = int(data.feed.newznab_response['total'] or 0)
+                            hits = (total / 100 + int(0 < (total % 100)))
+                            hits += int(0 == hits)
+                        offset = int(data.feed.newznab_response['offset'] or 0)
+                    except AttributeError:
+                        break
+
+                    # No items found or cache mode, prevent from doing another search
+                    if 0 == total or 'Cache' == mode:
+                        break
+
+                    if offset != request_params['offset']:
+                        logger.log('Tell your newznab provider to fix their bloody newznab responses')
+                        break
+
+                    request_params['offset'] += request_params['limit']
+                    if total <= request_params['offset']:
+                        logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
+                                   logger.DEBUG)
+                        break
+
+                    # there are more items available than the amount given in one call, grab some more
+                    items = total - request_params['offset']
+                    logger.log('%s more item%s to fetch from a batch of up to %s items.'
+                               % (items, helpers.maybe_plural(items), request_params['limit']), logger.DEBUG)
+
+                    batch_count = len(results) - cnt
+                    if batch_count:
+                        self._log_search(mode, batch_count, search_url)
+
+                if 'tvdbid' in request_params and len(results):
+                    break
 
         return results
 
-    def find_propers(self, search_date=None):
-        return self._find_propers(search_date)
-
 
 class NewznabCache(tvcache.TVCache):
 
     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)
 
-        self.minTime = 15  # cache update frequency
+        self.update_freq = 5  # cache update frequency
 
-    def _getRSSData(self):
-
-        params = {'t': 'tvsearch',
-                  'cat': self.provider.cat_ids + ',5060,5070',
-                  'attrs': 'rageid'}
-
-        has_apikey = self.provider.maybe_apikey()
-        if has_apikey:
-            params['apikey'] = has_apikey
-
-        rss_url = '%sapi?%s' % (self.provider.url, urllib.urlencode(params))
-
-        logger.log(self.provider.name + ' cache update URL: ' + rss_url, logger.DEBUG)
-
-        return self.getRSSFeed(rss_url)
-
     def updateCache(self):
 
-        if self.shouldUpdate():
+        result = []
+
+        if True or self.shouldUpdate():
             try:
                 self._checkAuth()
             except Exception:
-                return []
+                return result
 
-            data = self._getRSSData()
+            items = self.provider.cache_data()
+            if items:
 
-            # as long as the http request worked we count this as an update
-            if not data:
-                return []
-
-            # clear cache
-            self._clearCache()
-
-            self.setLastUpdate()
-
-            if self.provider.check_auth_from_data(data):
-                items = data.entries
+                self._clearCache()
+                self.setLastUpdate()
+
                 cl = []
                 for item in items:
                     ci = self._parseItem(item)
@@ -336,11 +347,7 @@ class NewznabCache(tvcache.TVCache):
                 my_db = self.get_db()
                 my_db.mass_action(cl)
 
-            else:
-                raise AuthException(
-                    u'Your authentication credentials for ' + self.provider.name + ' are incorrect, check your config')
-
-        return []
+        return result
 
     # overwrite method with that parses the rageid from the newznab feed
     def _parseItem(self, *item):
@@ -19,7 +19,7 @@
 import urllib

 from . import generic
-from sickbeard import logger, tvcache, show_name_helpers
+from sickbeard import logger, show_name_helpers, tvcache


 class NyaaProvider(generic.TorrentProvider):
@@ -31,7 +31,7 @@ class NyaaProvider(generic.TorrentProvider):

 self.cache = NyaaCache(self)

-def _do_search(self, search_string, search_mode='eponly', epcount=0, age=0):
+def _search_provider(self, search_string, **kwargs):

 results = []
 if self.show and not self.show.is_anime:
@@ -51,7 +51,7 @@ class NyaaProvider(generic.TorrentProvider):
 items = data.entries
 for curItem in items:

-title, url = self._get_title_and_url(curItem)
+title, url = self._title_and_url(curItem)

 if title and url:
 results.append(curItem)
@@ -65,13 +65,13 @@ class NyaaProvider(generic.TorrentProvider):

 return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)

-def _get_season_search_strings(self, ep_obj, **kwargs):
+def _season_strings(self, ep_obj, **kwargs):

 return show_name_helpers.makeSceneShowSearchStrings(self.show)

-def _get_episode_search_strings(self, ep_obj, **kwargs):
+def _episode_strings(self, ep_obj, **kwargs):

-return self._get_season_search_strings(ep_obj)
+return self._season_strings(ep_obj)


 class NyaaCache(tvcache.TVCache):
@@ -79,9 +79,9 @@ class NyaaCache(tvcache.TVCache):
 def __init__(self, this_provider):
 tvcache.TVCache.__init__(self, this_provider)

-self.minTime = 15  # cache update frequency
+self.update_freq = 15  # cache update frequency

-def _getRSSData(self):
+def _cache_data(self):
 params = {'page': 'rss',  # Use RSS page
 'order': '1',  # Sort Descending By Date
 'cats': '1_37'}  # Limit to English-translated Anime (for now)
@@ -16,18 +16,19 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-import re
 from datetime import datetime
+import re
 import time
 import traceback

-import generic
-import sickbeard
 import urllib
-from sickbeard import tvcache, classes, logger, show_name_helpers
+import sickbeard

+from . import generic
+from sickbeard import classes, logger, show_name_helpers, tvcache
+from sickbeard.bs4_parser import BS4Parser
 from sickbeard.exceptions import AuthException
 from sickbeard.rssfeeds import RSSFeeds
-from sickbeard.bs4_parser import BS4Parser


 class OmgwtfnzbsProvider(generic.NZBProvider):
@@ -80,15 +81,15 @@ class OmgwtfnzbsProvider(generic.NZBProvider):

 return True

-def _get_season_search_strings(self, ep_obj):
+def _season_strings(self, ep_obj):

 return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]

-def _get_episode_search_strings(self, ep_obj):
+def _episode_strings(self, ep_obj):

 return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]

-def _get_title_and_url(self, item):
+def _title_and_url(self, item):

 return item['release'], item['getnzb']

@@ -112,7 +113,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):

 return result

-def get_cache_data(self):
+def cache_data(self):

 api_key = self._init_api()
 if False is api_key:
@@ -132,11 +133,11 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 return data.entries
 return []

-def _do_search(self, search, search_mode='eponly', epcount=0, retention=0):
+def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0):

 api_key = self._init_api()
 if False is api_key:
-return self.search_html(search)
+return self.search_html(search, search_mode)
 results = []
 if None is not api_key:
 params = {'user': self.username,
@@ -156,7 +157,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 results.append(item)
 return results

-def search_html(self, search=''):
+def search_html(self, search='', search_mode=''):

 results = []
 if None is self.cookies:
@@ -189,7 +190,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 title = tr.find('a', href=rc['info'])['title']
 download_url = tr.find('a', href=rc['get'])
 age = tr.find_all('td')[-1]['data-sort']
-except (AttributeError, TypeError):
+except (AttributeError, TypeError, ValueError):
 continue

 if title and download_url and age:
@@ -202,19 +203,20 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 except Exception:
 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

-self._log_result(mode, len(results) - cnt, search_url)
+mode = (mode, search_mode)['Propers' == search_mode]
+self._log_search(mode, len(results) - cnt, search_url)
 return results

-def find_propers(self, search_date=None):
+def find_propers(self, **kwargs):

 search_terms = ['.PROPER.', '.REPACK.']
 results = []

 for term in search_terms:
-for item in self._do_search(term, retention=4):
+for item in self._search_provider(term, search_mode='Propers', retention=4):
 if 'usenetage' in item:

-title, url = self._get_title_and_url(item)
+title, url = self._title_and_url(item)
 try:
 result_date = datetime.fromtimestamp(int(item['usenetage']))
 except:
@@ -244,10 +246,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):

 @staticmethod
 def ui_string(key):
-result = ''
-if 'omgwtfnzbs_api_key' == key:
-result = 'Or use... \'cookie: cookname=xx; cookpass=yy\''
-return result
+return 'omgwtfnzbs_api_key' == key and 'Or use... \'cookie: cookname=xx; cookpass=yy\'' or ''


 class OmgwtfnzbsCache(tvcache.TVCache):
@@ -255,10 +255,11 @@ class OmgwtfnzbsCache(tvcache.TVCache):
 def __init__(self, this_provider):
 tvcache.TVCache.__init__(self, this_provider)

-self.minTime = 20
+self.update_freq = 20

-def _getRSSData(self):
+def _cache_data(self):

+return self.provider.cache_data()

-return self.provider.get_cache_data()


 provider = OmgwtfnzbsProvider()
@@ -14,12 +14,12 @@
 # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

 import re
-import datetime
 import traceback

 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode


@@ -39,43 +39,26 @@ class PiSexyProvider(generic.TorrentProvider):
 self.username, self.password, self.minseed, self.minleech = 4 * [None]
 self.cache = PiSexyCache(self)

-def _do_login(self):
+def _authorised(self, **kwargs):

-logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies and\
-'pcode' in self.session.cookies and 'pisexy' in self.session.cookies
-if logged_in():
-return True
+return super(PiSexyProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass', 'pcode', 'pisexy']))

-if self._check_auth():
-login_params = {'username': self.username, 'password': self.password}
-response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-if response and logged_in():
-return True
+def _search_provider(self, search_params, **kwargs):

-msg = u'Failed to authenticate with %s, abort provider'
-if response and 'Username or password incorrect' in response:
-msg = u'Invalid username or password for %s. Check settings'
-logger.log(msg % self.name, logger.ERROR)

-return False

-def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):

 results = []
-if not self._do_login():
+if not self._authorised():
 return results

-items = {'Season': [], 'Episode': [], 'Cache': []}
+items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

 rc = dict((k, re.compile('(?i)' + v))
 for (k, v) in {'info': 'download', 'get': 'download', 'valid_cat': 'cat=(?:0|50[12])',
 'title': r'Download\s([^\s]+).*', 'seeders': r'(^\d+)', 'leechers': r'(\d+)$'}.items())
 for mode in search_params.keys():
 for search_string in search_params[mode]:
-if isinstance(search_string, unicode):
-search_string = unidecode(search_string)
+search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

 search_url = self.urls['search'] % search_string

 html = self.get_url(search_url)

 cnt = len(items[mode])
@@ -93,61 +76,46 @@ class PiSexyProvider(generic.TorrentProvider):
 for tr in torrent_rows[1:]:
 try:
 seeders, leechers = 2 * [tr.find_all('td')[-4].get_text().strip()]
-seeders = int(rc['seeders'].findall(seeders)[0])
-leechers = int(rc['leechers'].findall(leechers)[0])
-if 'Cache' != mode:
-if not tr.find('a', href=rc['valid_cat']):
-continue
+seeders, leechers = [tryInt(n) for n in [
+rc['seeders'].findall(seeders)[0], rc['leechers'].findall(leechers)[0]]]
+if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['valid_cat']):
+continue

-if seeders < self.minseed or leechers < self.minleech:
-continue

 info = tr.find('a', href=rc['info'])
 title = 'title' in info.attrs and rc['title'].sub('', info.attrs['title'])\
 or info.get_text().strip()
+size = tr.find_all('td')[3].get_text().strip()

 download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')

-except (AttributeError, TypeError):
+except (AttributeError, TypeError, ValueError):
 continue

 if title and download_url:
-items[mode].append((title, download_url, seeders))
+items[mode].append((title, download_url, seeders, self._bytesizer(size)))

 except generic.HaltParseException:
 pass
 except Exception:
 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

-self._log_result(mode, len(items[mode]) - cnt, search_url)
+self._log_search(mode, len(items[mode]) - cnt, search_url)

-# For each search mode sort all the items by seeders
-items[mode].sort(key=lambda tup: tup[2], reverse=True)
+self._sort_seeders(mode, items)

-results += items[mode]
+results = list(set(results + items[mode]))

 return results

-def find_propers(self, search_date=datetime.datetime.today()):

-return self._find_propers(search_date)

-def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

-return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)


 class PiSexyCache(tvcache.TVCache):

 def __init__(self, this_provider):
 tvcache.TVCache.__init__(self, this_provider)

-self.minTime = 10  # cache update frequency
+def _cache_data(self):

-def _getRSSData(self):
+return self.provider.cache_data()

-return self.provider.get_cache_data()


 provider = PiSexyProvider()
@@ -15,8 +15,6 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-import datetime

 from . import generic
 from sickbeard import tvcache
 from sickbeard.rssfeeds import RSSFeeds
@@ -31,7 +29,7 @@ class PreToMeProvider(generic.TorrentProvider):
 self.url_base = 'https://pretome.info/'

 self.urls = {'config_provider_home_uri': self.url_base,
-'cache': self.url_base + 'rss.php?cat[]=7&sort=0&type=d&key=%s',
+'browse': self.url_base + 'rss.php?cat[]=7&sort=0&type=d&key=%s',
 'search': '&st=1&tf=all&search=%s'}

 self.url = self.urls['config_provider_home_uri']
@@ -39,24 +37,23 @@ class PreToMeProvider(generic.TorrentProvider):
 self.passkey = None
 self.cache = PreToMeCache(self)

-def _do_login(self):
+def _authorised(self, **kwargs):

 return self._check_auth()

-def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+def _search_provider(self, search_params, **kwargs):

-self._do_login()
+self._authorised()
 results = []

-items = {'Season': [], 'Episode': [], 'Cache': []}
+items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

-url = self.urls['cache'] % self.passkey
+url = self.urls['browse'] % self.passkey
 for mode in search_params.keys():
 for search_string in search_params[mode]:
-if isinstance(search_string, unicode):
-search_string = unidecode(search_string)
+search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]

-search_url = (url + self.urls['search'] % search_string, url)['Cache' == mode]
 data = RSSFeeds(self).get_feed(search_url)

 cnt = len(items[mode])
@@ -64,35 +61,27 @@ class PreToMeProvider(generic.TorrentProvider):
 for entry in data['entries']:
 try:
 if entry['title'] and 'download' in entry['link']:
-items[mode].append((entry['title'], entry['link']))
+items[mode].append((entry['title'], entry['link'], None, None))
 except KeyError:
 continue

-self._log_result(mode, len(items[mode]) - cnt, search_url)
+self._log_search(mode, len(items[mode]) - cnt, search_url)

-results += items[mode]
+results = list(set(results + items[mode]))

 return results

-def find_propers(self, search_date=datetime.datetime.today()):

-return self._find_propers(search_date)

-def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

-return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)


 class PreToMeCache(tvcache.TVCache):

 def __init__(self, this_provider):
 tvcache.TVCache.__init__(self, this_provider)

-self.minTime = 6  # cache update frequency
+self.update_freq = 6  # cache update frequency

-def _getRSSData(self):
+def _cache_data(self):

-return self.provider.get_cache_data()
+return self.provider.cache_data()


 provider = PreToMeProvider()
@@ -21,7 +21,7 @@ import datetime
 import time

 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import helpers, logger, tvcache
 from sickbeard.indexers.indexer_config import INDEXER_TVDB


@@ -38,16 +38,14 @@ class RarbgProvider(generic.TorrentProvider):
 'api_list': self.url_api + 'mode=list',
 'api_search': self.url_api + 'mode=search'}

-self.categories = '18;41'
-self.params = {'defaults': '&category=%(cat)s&limit=100&sort=last' % {'cat': self.categories}
-+ '&ranked=%(ranked)s&token=%(token)s',
+self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked=%(ranked)s&token=%(token)s',
 'param_iid': '&search_imdb=%(sid)s',
 'param_tid': '&search_tvdb=%(sid)s',
-'param_rid': '&search_tvrage=%(sid)s',
 'param_str': '&search_string=%(str)s',
 'param_seed': '&min_seeders=%(min_seeds)s',
 'param_peer': '&min_leechers=%(min_peers)s'}

+self.proper_search_terms = '{{.proper.|.repack.}}'
 self.url = self.urls['config_provider_home_uri']

 self.minseed, self.minleech, self.token, self.token_expiry = 4 * [None]
@@ -55,48 +53,50 @@ class RarbgProvider(generic.TorrentProvider):
 self.request_throttle = datetime.datetime.now()
 self.cache = RarbgCache(self)

-def _do_login(self, reset=False):
+def _authorised(self, reset=False, **kwargs):

 if not reset and self.token and self.token_expiry and datetime.datetime.now() < self.token_expiry:
 return True

-response = helpers.getURL(self.urls['api_token'], session=self.session, json=True)
-if response and 'token' in response:
-self.token = response['token']
-self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
-return True
+for r in range(0, 3):
+response = helpers.getURL(self.urls['api_token'], session=self.session, json=True)
+if response and 'token' in response:
+self.token = response['token']
+self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
+return True
+time.sleep(1.1)

 logger.log(u'No usable API token returned from: %s' % self.urls['api_token'], logger.ERROR)
 return False

-def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+def _search_provider(self, search_params, **kwargs):

 results = []
-if not self._do_login(reset=True):
+if not self._authorised(reset=True):
 return results

-items = {'Season': [], 'Episode': [], 'Cache': []}
+items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

 id_search = None
 if hasattr(self, 'show') and self.show and self.show.indexer and self.show.indexerid:
+sid, search_with = 2 * [None]
 if 0 < len(self.show.imdb_info):
 sid = self.show.imdb_info['imdb_id']
 search_with = 'param_iid'
-else:
+elif INDEXER_TVDB == self.show.indexer:
 sid = self.show.indexerid
-if INDEXER_TVDB == self.show.indexer:
-search_with = 'param_tid'
-else:  # INDEXER_TVRAGE == self.show.indexer:
-search_with = 'param_rid'
-id_search = self.params[search_with] % {'sid': sid}
+search_with = 'param_tid'

+if sid and search_with:
+id_search = self.params[search_with] % {'sid': sid}

 dedupe = []
-search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[1], reverse=True)  # sort type "_only" as first to process
+search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True)  # sort type "_only" as first to process
 for mode_params in search_types:
 mode_search = mode_params[0]
-mode_base = mode_search.replace('_only', '')
+mode = mode_search.replace('_only', '')
 for search_string in mode_params[1]:
-search_url = ''
+searched_url = search_url = ''
 url = 'api_list'
 if 'Cache' != mode_search:
 url = 'api_search'
@@ -115,62 +115,61 @@ class RarbgProvider(generic.TorrentProvider):
 if self.minleech:
 search_url += self.params['param_peer'] % {'min_peers': self.minleech}

-cnt = len(items[mode_base])
+cnt = len(items[mode])
 for r in range(0, 3):
 time_out = 0
 while(self.request_throttle > datetime.datetime.now()) and 2 >= time_out:
 time_out += 1
 time.sleep(1)

-data = self.get_url(search_url % {'ranked': int(self.confirmed), 'token': self.token}, json=True)
+searched_url = search_url % {'ranked': int(self.confirmed), 'token': self.token}

+data = self.get_url(searched_url, json=True)

 self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
 self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)
+if not data:
+continue

 if 'error' in data:
 if 5 == data['error_code']:  # Too many requests per second.
 continue

 elif 2 == data['error_code']:  # Invalid token set
-if self._do_login(reset=True):
+if self._authorised(reset=True):
 continue
-self._log_result(mode_base, len(items[mode_base]) - cnt, search_url)
-return results
+self.log_result(mode, len(items[mode]) - cnt, searched_url)
+return items[mode]
 break

 if 'error' not in data:
 for item in data['torrent_results']:
-try:
-title = item['filename']
-get = item['download']
-if not (title and get) or get in dedupe:
+title, download_magnet, seeders, size = [
+item.get(x) for x in 'title', 'download', 'seeders', 'size']
+title = None is title and item.get('filename') or title
+if not (title and download_magnet) or download_magnet in dedupe:
 continue
-dedupe += [get]
-items[mode_base].append((title, get))
-except Exception:
-pass
+dedupe += [download_magnet]
+items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))

-if 0 < len(items[mode_base]):
-results += items[mode_base]
-items[mode_base] = []
+self._log_search(mode, len(items[mode]) - cnt, searched_url)

-self._log_result(mode_base, len(items[mode_base]) - cnt, search_url)
+self._sort_seeders(mode, items)

-if '_only' in mode_search and 0 < len(results):
+results = list(set(results + items[mode]))

+if '_only' in mode_search and len(results):
 break

 return results

-def find_propers(self, search_date=datetime.datetime.today()):
+def _season_strings(self, ep_obj, **kwargs):

-return self._find_propers(search_date, '{{.proper.|.repack.}}')
+return generic.TorrentProvider._season_strings(self, ep_obj, detail_only=True)

-def _get_season_search_strings(self, ep_obj, **kwargs):
+def _episode_strings(self, ep_obj, **kwargs):

-return generic.TorrentProvider._get_season_search_strings(self, ep_obj, detail_only=True)
+search_params = generic.TorrentProvider._episode_strings(self, ep_obj, detail_only=True, date_or=True, **kwargs)

-def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

-search_params = generic.TorrentProvider._get_episode_search_strings(self, ep_obj, detail_only=True)
 if self.show.air_by_date and self.show.is_sports:
 for x, types in enumerate(search_params):
 for y, ep_type in enumerate(types):
@@ -184,9 +183,9 @@ class RarbgCache(tvcache.TVCache):
 def __init__(self, this_provider):
 tvcache.TVCache.__init__(self, this_provider)

-def _getRSSData(self):
+def _cache_data(self):

-return self.provider.get_cache_data()
+return self.provider.cache_data()


 provider = RarbgProvider()
@@ -1,4 +1,4 @@
-# Author: Mr_Orange
+# coding=utf-8
 #
 # This file is part of SickGear.
 #
@@ -19,8 +19,8 @@ import re

 from . import generic
 from sickbeard import logger, tvcache
-from sickbeard.rssfeeds import RSSFeeds
 from sickbeard.exceptions import ex
+from sickbeard.rssfeeds import RSSFeeds
 from lib.bencode import bdecode


@@ -55,7 +55,7 @@ class TorrentRssProvider(generic.TorrentProvider):
 self.enable_recentsearch,
 self.enable_backlog)

-def _get_title_and_url(self, item):
+def _title_and_url(self, item):

 title, url = None, None

@@ -86,10 +86,10 @@ class TorrentRssProvider(generic.TorrentProvider):
 return success, err_msg

 try:
-items = self.get_cache_data()
+items = self.cache_data()

 for item in items:
-title, url = self._get_title_and_url(item)
+title, url = self._title_and_url(item)
 if not (title and url):
 continue
 if url.startswith('magnet:'):
@@ -111,7 +111,7 @@ class TorrentRssProvider(generic.TorrentProvider):
 except Exception as e:
 return False, 'Error when trying to load RSS: ' + ex(e)

-def get_cache_data(self):
+def cache_data(self):

 logger.log(u'TorrentRssCache cache update URL: ' + self.url, logger.DEBUG)

@@ -125,8 +125,8 @@ class TorrentRssCache(tvcache.TVCache):
 def __init__(self, provider):
 tvcache.TVCache.__init__(self, provider)

-self.minTime = 15
+self.update_freq = 15

-def _getRSSData(self):
+def _cache_data(self):

-return self.provider.get_cache_data()
+return self.provider.cache_data()
@@ -16,13 +16,13 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

 import re
-import datetime
 import time
 import traceback

 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode


@@ -44,37 +44,23 @@ class SCCProvider(generic.TorrentProvider):
 self.username, self.password, self.minseed, self.minleech = 4 * [None]
 self.cache = SCCCache(self)

-def _do_login(self):
+def _authorised(self, **kwargs):

-logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
-if logged_in():
-return True
+return super(SCCProvider, self)._authorised(post_params={'submit': 'come+on+in'})

-if self._check_auth():
-login_params = {'username': self.username, 'password': self.password, 'submit': 'come on in'}

-response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-if response and logged_in():
-return True

-logger.log(u'Failed to authenticate with %s, abort provider.' % self.name, logger.ERROR)

-return False

-def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+def _search_provider(self, search_params, **kwargs):

 results = []
-items = {'Season': [], 'Episode': [], 'Cache': []}
+items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

-if not self._do_login():
+if not self._authorised():
 return results

 rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
 for mode in search_params.keys():
 for search_string in search_params[mode]:
-search_string, void = self._get_title_and_url((search_string, None))
-if isinstance(search_string, unicode):
-search_string = unidecode(search_string)
+search_string, void = self._title_and_url((search_string, None))
+search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

 if 'Season' == mode:
 searches = [self.urls['archive'] % search_string]
@@ -83,6 +69,7 @@ class SCCProvider(generic.TorrentProvider):
 self.urls['nonscene'] % search_string]

 for search_url in searches:

 html = self.get_url(search_url)

 cnt = len(items[mode])
@@ -99,9 +86,10 @@ class SCCProvider(generic.TorrentProvider):

 for tr in torrent_table.find_all('tr')[1:]:
 try:
-seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
-for x in ('ttr_seeders', 'ttr_leechers')]
-if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+seeders, leechers, size = [tryInt(n, n) for n in [
+tr.find('td', attrs={'class': x}).get_text().strip()
+for x in ('ttr_seeders', 'ttr_leechers', 'ttr_size')]]
+if self._peers_fail(mode, seeders, leechers):
 continue

 info = tr.find('a', href=rc['info'])
@@ -109,32 +97,28 @@ class SCCProvider(generic.TorrentProvider):

 link = str(tr.find('a', href=rc['get'])['href']).lstrip('/')
 download_url = self.urls['get'] % link
-except (AttributeError, TypeError):
+except (AttributeError, TypeError, ValueError):
 continue

 if title and download_url:
-items[mode].append((title, download_url, seeders))
+items[mode].append((title, download_url, seeders, self._bytesizer(size)))

 except generic.HaltParseException:
 time.sleep(1.1)
 except Exception:
 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-self._log_result(mode, len(items[mode]) - cnt, search_url)
+self._log_search(mode, len(items[mode]) - cnt, search_url)

-# For each search mode sort all the items by seeders
-items[mode].sort(key=lambda tup: tup[2], reverse=True)
+self._sort_seeders(mode, items)

-results += items[mode]
+results = list(set(results + items[mode]))

 return results

-def find_propers(self, search_date=datetime.datetime.today()):
+def _episode_strings(self, ep_obj, **kwargs):

-return self._find_propers(search_date)
+return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)

-def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

-return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='.', use_or=False)


 class SCCCache(tvcache.TVCache):
@@ -142,11 +126,11 @@ class SCCCache(tvcache.TVCache):
 def __init__(self, this_provider):
 tvcache.TVCache.__init__(self, this_provider)

-self.minTime = 20  # cache update frequency
+self.update_freq = 20  # cache update frequency

-def _getRSSData(self):
+def _cache_data(self):

-return self.provider.get_cache_data()
+return self.provider.cache_data()


 provider = SCCProvider()
@@ -15,13 +15,14 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

+import ast
 import re
-import datetime
 import traceback

 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode


@@ -33,52 +34,47 @@ class SceneTimeProvider(generic.TorrentProvider):
 self.url_base = 'https://www.scenetime.com/'
 self.urls = {'config_provider_home_uri': self.url_base,
 'login': self.url_base + 'takelogin.php',
-'search': self.url_base + 'browse.php?%ssearch=%s',
+'browse': self.url_base + 'browse_API.php',
+'params': {'sec': 'jax', 'cata': 'yes'},
 'get': self.url_base + 'download.php/%(id)s/%(title)s.torrent'}

-self.categories = 'c2=1&c43=1&c9=1&c63=1&c77=1&c79=1&c101=1&cata=yes&'
+self.categories = {'shows': [2, 43, 9, 63, 77, 79, 101]}

 self.url = self.urls['config_provider_home_uri']

 self.username, self.password, self.minseed, self.minleech = 4 * [None]
+self.freeleech = False
 self.cache = SceneTimeCache(self)

-def _do_login(self):
+def _authorised(self, **kwargs):

-logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
-if logged_in():
-return True
+return super(SceneTimeProvider, self)._authorised(post_params={'submit': 'Log in'})

-if self._check_auth():
-login_params = {'username': self.username, 'password': self.password, 'submit': 'Log in'}
-response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-if response and logged_in():
-return True
+def _search_provider(self, search_params, **kwargs):

-msg = u'Failed to authenticate with %s, abort provider'
-if response and 'Username or password incorrect' in response:
-msg = u'Invalid username or password for %s. Check settings'
-logger.log(msg % self.name, logger.ERROR)

-return False

-def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):

 results = []
-if not self._do_login():
+if not self._authorised():
 return results

-items = {'Season': [], 'Episode': [], 'Cache': []}
+items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

-rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': '.*id=(\d+).*',
-'cats': 'cat=(?:2|9|43|63|77|79|101)'}.items())
+rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': '.*id=(\d+).*', 'fl': '\[freeleech\]',
+'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|')
+}.items())
 for mode in search_params.keys():
 for search_string in search_params[mode]:
-if isinstance(search_string, unicode):
-search_string = unidecode(search_string)
+search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

-search_url = self.urls['search'] % (self.categories, search_string)
-html = self.get_url(search_url)
+post_data = self.urls['params'].copy()
+post_data.update(ast.literal_eval('{%s}' % self._categories_string(template='"c%s": "1"', delimiter=',')))
+if 'Cache' != mode:
+post_data['search'] = '+'.join(search_string.split())

+if self.freeleech:
+post_data.update({'freeleech': 'on'})

+self.session.headers.update({'Referer': self.url + 'browse.php', 'X-Requested-With': 'XMLHttpRequest'})
+html = self.get_url(self.urls['browse'], post_data=post_data)

 cnt = len(items[mode])
 try:
@@ -86,7 +82,7 @@ class SceneTimeProvider(generic.TorrentProvider):
 raise generic.HaltParseException

 with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-torrent_table = soup.find('div', id='torrenttable').find('table')
+torrent_table = soup.find('table', attrs={'cellpadding': 5})
 torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

 if 2 > len(torrent_rows):
@@ -94,9 +90,11 @@ class SceneTimeProvider(generic.TorrentProvider):

 for tr in torrent_rows[1:]:
 try:
-seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
+seeders, leechers, size = [tryInt(n, n) for n in [
+tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
 if None is tr.find('a', href=rc['cats'])\
-or ('Cache' != mode and (seeders < self.minseed or leechers < self.minleech)):
+or self.freeleech and None is rc['fl'].search(tr.find_all('td')[1].get_text())\
+or self._peers_fail(mode, seeders, leechers):
 continue

 info = tr.find('a', href=rc['info'])
@@ -104,45 +102,37 @@ class SceneTimeProvider(generic.TorrentProvider):

 download_url = self.urls['get'] % {'id': re.sub(rc['get'], r'\1', str(info.attrs['href'])),
 'title': str(title).replace(' ', '.')}
-except (AttributeError, TypeError):
+except (AttributeError, TypeError, ValueError):
 continue

 if title and download_url:
-items[mode].append((title, download_url, seeders))
+items[mode].append((title, download_url, seeders, self._bytesizer(size)))

 except generic.HaltParseException:
 pass
 except Exception:
 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

-self._log_result(mode, len(items[mode]) - cnt, search_url)
+self._log_search(mode, len(items[mode]) - cnt,
+('search string: ' + search_string, self.name)['Cache' == mode])

-# For each search mode sort all the items by seeders
-items[mode].sort(key=lambda tup: tup[2], reverse=True)
+self._sort_seeders(mode, items)

-results += items[mode]
+results = list(set(results + items[mode]))

 return results

-def find_propers(self, search_date=datetime.datetime.today()):

-return self._find_propers(search_date)

-def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):

-return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)


 class SceneTimeCache(tvcache.TVCache):

 def __init__(self, this_provider):
 tvcache.TVCache.__init__(self, this_provider)

-self.minTime = 15  # cache update frequency
+self.update_freq = 15  # cache update frequency

-def _getRSSData(self):
+def _cache_data(self):

-return self.provider.get_cache_data()
+return self.provider.cache_data()


 provider = SceneTimeProvider()
162
sickbeard/providers/shazbat.py
Normal file
|
@ -0,0 +1,162 @@
|
||||||
|
# coding=utf-8
|
||||||
|
#
|
||||||
|
# Author: SickGear
|
||||||
|
#
|
||||||
|
# This file is part of SickGear.
|
||||||
|
#
|
||||||
|
# SickGear is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# SickGear is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
import traceback
|
from . import generic
from sickbeard import helpers, logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class ShazbatProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'Shazbat')

        self.url_base = 'https://www.shazbat.tv/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login_action': self.url_base + 'login',
                     'feeds': self.url_base + 'rss_feeds',
                     'browse': self.url_base + 'torrents?portlet=true',
                     'search': self.url_base + 'search?portlet=true&search=%s',
                     'show': self.url_base + 'show?id=%s&show_mode=torrents',
                     'get': self.url_base + '%s'}

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.minseed, self.minleech = 4 * [None]
        self.freeleech = False
        self.cache = ShazbatCache(self)

    def _authorised(self, **kwargs):

        return super(ShazbatProvider, self)._authorised(
            logged_in=(lambda x=None: '<input type="password"' not in helpers.getURL(
                self.urls['feeds'], session=self.session)),
            post_params={'tv_login': self.username, 'tv_password': self.password,
                         'referer': 'login', 'query': '', 'email': ''})

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'show_id': '"show\?id=(\d+)[^>]+>([^<]+)<\/a>',
                                                             'get': 'load_torrent'}.items())
        search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True)
        maybe_only = search_types[0][0]
        show_detail = '_only' in maybe_only and search_params.pop(maybe_only)[0] or ''
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                if 'Cache' == mode:
                    search_url = self.urls['browse']
                    html = self.get_url(search_url)
                else:
                    search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                    search_string = search_string.replace(show_detail, '').strip()
                    search_url = self.urls['search'] % search_string
                    html = self.get_url(search_url)
                    shows = rc['show_id'].findall(html)
                    if not any(shows):
                        continue
                    html = ''
                    for show in shows:
                        sid, title = show
                        if title not in search_string:
                            continue
                        html and time.sleep(1.1)
                        html += self.get_url(self.urls['show'] % sid)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_rows = soup.tbody.find_all('tr') or soup.table.find_all('tr') or []

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[0:]:
                            try:
                                stats = tr.find_all('td')[3].get_text().strip()
                                seeders, leechers = [(tryInt(x[0], 0), tryInt(x[1], 0)) for x in
                                                     re.findall('(?::(\d+))(?:\W*[/]\W*:(\d+))?', stats) if x[0]][0]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue
                                sizes = [(tryInt(x[0], x[0]), tryInt(x[1], False)) for x in
                                         re.findall('([\d\.]+\w+)?(?:\s*[\(\[](\d+)[\)\]])?', stats) if x[0]][0]
                                size = sizes[(0, 1)[1 < len(sizes)]]

                                for element in [x for x in tr.find_all('td')[2].contents[::-1] if unicode(x).strip()]:
                                    if 'NavigableString' in str(element.__class__):
                                        title = unicode(element).strip()
                                        break

                                link = str(tr.find('a', href=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
                                download_url = self.urls['get'] % link
                            except (AttributeError, TypeError, ValueError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except Exception:
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))

        return results

    def _season_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._season_strings(self, ep_obj, detail_only=True, scene=False)

    def _episode_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._episode_strings(self, ep_obj, detail_only=True, scene=False, **kwargs)


class ShazbatCache(tvcache.TVCache):

    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)

        self.update_freq = 20  # cache update frequency

    def _cache_data(self):

        return self.provider.cache_data()


provider = ShazbatProvider()
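Note on the size handling seen above (illustrative, not part of the PR): each refactored provider now appends the result of `self._bytesizer(size)` to its result tuples, but the helper itself is not shown in this diff. A minimal sketch of what such a conversion could look like, assuming it only needs to turn strings such as '1.4 GB' into a byte count and to pass anything else through unchanged:

    # illustrative sketch only -- not the SickGear implementation
    import re

    def bytesizer(size_dim=''):
        # pull the leading number, e.g. '1.4 GB' -> 1.4, '690 MB' -> 690.0
        try:
            value = float('.'.join(re.findall(r'\d+', str(size_dim))[0:2]))
        except (IndexError, ValueError):
            return size_dim
        # scale by the first matching unit suffix (b, kb, mb, gb, tb)
        for degree, unit in enumerate(['b', 'k', 'm', 'g', 't']):
            if re.search(r'(?i)\d\s*%s' % unit, str(size_dim)):
                return int(value * (1024 ** degree))
        return size_dim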
sickbeard/providers/speedcd.py
@@ -16,27 +16,27 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import datetime
 import time
 
 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import tvcache
+from sickbeard.helpers import tryInt
 
 
 class SpeedCDProvider(generic.TorrentProvider):
 
     def __init__(self):
-        generic.TorrentProvider.__init__(self, 'Speedcd')
+        generic.TorrentProvider.__init__(self, 'SpeedCD')
 
         self.url_base = 'http://speed.cd/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'take_login.php',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'V3/API/API.php',
                      'get': self.url_base + 'download.php?torrent=%s'}
 
-        self.categories = {'Season': {'c14': 1},
-                           'Episode': {'c2': 1, 'c49': 1},
-                           'Cache': {'c14': 1, 'c2': 1, 'c49': 1}}
+        self.categories = {'Season': {'c41': 1, 'c53': 1},
+                           'Episode': {'c2': 1, 'c49': 1, 'c50': 1, 'c55': 1},
+                           'Cache': {'c41': 1, 'c2': 1, 'c49': 1, 'c50': 1, 'c53': 1, 'c55': 1}}
 
         self.url = self.urls['config_provider_home_uri']
 
@@ -44,80 +44,65 @@ class SpeedCDProvider(generic.TorrentProvider):
         self.freeleech = False
         self.cache = SpeedCDCache(self)
 
-    def _do_login(self):
+    def _authorised(self, **kwargs):
 
-        logged_in = lambda: 'inSpeed_speedian' in self.session.cookies
-        if logged_in():
-            return True
+        return super(SpeedCDProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('inSpeed_speedian')))
 
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate with %s, abort provider'
-            if response and re.search('Incorrect username or Password. Please try again.', response):
-                msg = u'Invalid username or password for %s. Check settings'
-            logger.log(msg % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         results = []
-        if not self._do_login():
+        if not self._authorised():
             return results
 
-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
         remove_tag = re.compile(r'<[^>]*>')
         for mode in search_params.keys():
+            search_mode = (mode, 'Episode')['Propers' == mode]
             for search_string in search_params[mode]:
                 search_string = '+'.join(search_string.split())
                 post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string},
-                                 **self.categories[mode])
+                                 **self.categories[search_mode])
+                if self.freeleech:
+                    post_data['freeleech'] = 'on'
+
                 data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
 
                 cnt = len(items[mode])
                 try:
                     if not data_json:
                         raise generic.HaltParseException
                     torrents = data_json.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
 
-                    for torrent in torrents:
+                    for item in torrents:
 
-                        if self.freeleech and not torrent['free']:
+                        if self.freeleech and not item.get('free'):
                             continue
 
-                        seeders, leechers = int(torrent['seed']), int(torrent['leech'])
-                        if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                        seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seed', 'leech', 'size']]
+                        if self._peers_fail(mode, seeders, leechers):
                             continue
 
-                        title = remove_tag.sub('', torrent['name'])
-                        url = self.urls['get'] % (torrent['id'])
-                        if title and url:
-                            items[mode].append((title, url, seeders))
+                        title = remove_tag.sub('', item.get('name'))
+                        download_url = self.urls['get'] % item.get('id')
+                        if title and download_url:
+                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
                 except Exception:
                     time.sleep(1.1)
 
-                self._log_result(mode, len(items[mode]) - cnt,
+                self._log_search(mode, len(items[mode]) - cnt,
                                  ('search string: ' + search_string, self.name)['Cache' == mode])
 
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)
 
-            results += items[mode]
+            results = list(set(results + items[mode]))
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
+    def _episode_strings(self, ep_obj, **kwargs):
 
-        return self._find_propers(search_date)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='.', use_or=False)
+        return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)
 
 
 class SpeedCDCache(tvcache.TVCache):
@@ -125,11 +110,11 @@ class SpeedCDCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = SpeedCDProvider()
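Note on the recurring pattern in the hunks above and below (illustrative assumption, not the actual generic.TorrentProvider source): per-provider login, peer filtering and result sorting are replaced by base-class helpers such as `_authorised`, `_peers_fail` and `_sort_seeders`. A minimal sketch of the two filter/sort helpers, inferred only from the inline expressions they replace in these diffs:

    # illustrative sketch of assumed base-class helpers
    class TorrentProviderBase(object):

        minseed = minleech = 0

        def _peers_fail(self, mode, seeders=0, leechers=0):
            # cache fetches accept anything; real searches must meet the configured minimums
            return 'Cache' != mode and (seeders < (self.minseed or 0) or leechers < (self.minleech or 0))

        @staticmethod
        def _sort_seeders(mode, items):
            # rank the current mode's results by seeder count, highest first
            items[mode].sort(key=lambda tup: tup[2], reverse=True)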
sickbeard/providers/strike.py  (new file, 84 lines)

# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
from . import generic
from sickbeard import helpers
from sickbeard.helpers import tryInt


class StrikeProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'Strike')

        self.url_base = 'https://getstrike.net/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'search': self.url_base + 'api/v2/torrents/search/?category=%s&phrase=%s'}

        self.url = self.urls['config_provider_home_uri']

        self.minseed, self.minleech = 2 * [None]

    def _search_provider(self, search_params, **kwargs):

        results = []
        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        for mode in search_params.keys():
            search_show = mode in ['Season', 'Episode']
            if not search_show and helpers.has_anime():
                search_params[mode] *= (1, 2)['Cache' == mode]

            for enum, search_string in enumerate(search_params[mode]):
                search_url = self.urls['search'] % \
                    (('tv', 'anime')[(search_show and bool(self.show and self.show.is_anime)) or bool(enum)],
                     (re.sub('[\.\s]+', ' ', search_string), 'x264')['Cache' == mode])

                data_json = self.get_url(search_url, json=True)

                cnt = len(items[mode])
                try:
                    for item in data_json['torrents']:
                        seeders, leechers, title, download_magnet, size = [tryInt(n, n) for n in [item.get(x) for x in [
                            'seeds', 'leeches', 'torrent_title', 'magnet_uri', 'size']]]
                        if self._peers_fail(mode, seeders, leechers):
                            continue

                        if title and download_magnet:
                            items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))

                except Exception:
                    pass
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))

        return results

    def _season_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False)

    def _episode_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)


provider = StrikeProvider()
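Note (plain Python aside, not part of the diff): the new provider code repeatedly indexes a two-item tuple with a boolean instead of writing an if/else, so it helps to read `(a, b)[condition]` as "b when the condition is true, otherwise a":

    # the idiom used throughout the refactored providers
    mode = 'Cache'
    multiplier = (1, 2)['Cache' == mode]   # -> 2, because True indexes position 1
    category = ('tv', 'anime')[False]      # -> 'tv', because False indexes position 0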
sickbeard/providers/thepiratebay.py
@@ -19,15 +19,14 @@ from __future__ import with_statement
 
 import os
 import re
-import datetime
-import urllib
 import traceback
+import urllib
 
 from . import generic
 from sickbeard import config, logger, tvcache, show_name_helpers
+from sickbeard.bs4_parser import BS4Parser
 from sickbeard.common import Quality, mediaExtensions
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-from sickbeard.bs4_parser import BS4Parser
 from lib.unidecode import unidecode
 
 
@@ -40,8 +39,9 @@ class ThePirateBayProvider(generic.TorrentProvider):
                                          'https://thepiratebay.mn/', 'https://thepiratebay.vg/',
                                          'https://thepiratebay.la/'],
                      'search': 'search/%s/0/7/200',
-                     'cache': 'tv/latest/'}  # order by seed
+                     'browse': 'tv/latest/'}  # order by seed
 
+        self.proper_search_terms = None
         self.url = self.urls['config_provider_home_uri'][0]
 
         self.minseed, self.minleech = 2 * [None]
@@ -115,7 +115,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
 
         return title
 
-    def _get_season_search_strings(self, ep_obj, **kwargs):
+    def _season_strings(self, ep_obj, **kwargs):
 
         if ep_obj.show.air_by_date or ep_obj.show.sports:
             airdate = str(ep_obj.airdate).split('-')[0]
@@ -128,26 +128,17 @@ class ThePirateBayProvider(generic.TorrentProvider):
 
         return [{'Season': self._build_search_strings(ep_detail)}]
 
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
+    def _episode_strings(self, ep_obj, **kwargs):
 
-        if self.show.air_by_date or self.show.is_sports:
-            ep_detail = str(ep_obj.airdate).replace('-', ' ')
-            if self.show.is_sports:
-                ep_detail += '|' + ep_obj.airdate.strftime('%b')
-        elif self.show.is_anime:
-            ep_detail = '%02i' % ep_obj.scene_absolute_number
-        else:
-            season, episode = ((ep_obj.season, ep_obj.episode),
-                               (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
-            ep_dict = {'seasonnumber': season, 'episodenumber': episode}
-            ep_detail = '%s|%s' % (config.naming_ep_type[2] % ep_dict, config.naming_ep_type[0] % ep_dict)
-
-        return [{'Episode': self._build_search_strings(ep_detail, append=(add_string, '')[self.show.is_anime])}]
+        return generic.TorrentProvider._episode_strings(self, ep_obj, date_or=True,
+                                                        ep_detail=lambda x: '%s|%s' % (config.naming_ep_type[2] % x,
+                                                                                       config.naming_ep_type[0] % x),
+                                                        ep_detail_anime=lambda x: '%02i' % x, **kwargs)
 
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, search_mode='eponly', epcount=0, **kwargs):
 
         results = []
-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
         rc = dict((k, re.compile('(?i)' + v))
                   for (k, v) in {'info': 'detail', 'get': 'download[^"]+magnet', 'tid': r'.*/(\d{5,}).*',
@@ -155,17 +146,17 @@ class ThePirateBayProvider(generic.TorrentProvider):
         has_signature = False
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
 
                 log_url = '%s %s' % (self.name, search_string)  # placebo value
                 for idx, search_url in enumerate(self.urls['config_provider_home_uri']):
-                    search_url += self.urls['cache'] if 'Cache' == mode\
+                    search_url += self.urls['browse'] if 'Cache' == mode\
                         else self.urls['search'] % (urllib.quote(search_string))
 
                    log_url = u'(%s/%s): %s' % (idx + 1, len(self.urls['config_provider_home_uri']), search_url)
 
                     html = self.get_url(search_url)
 
                     if html and re.search(r'Pirate\sBay', html[33:7632:]):
                         has_signature = True
                         break
@@ -177,7 +168,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
                     if not html or self._has_no_results(html):
                         raise generic.HaltParseException
 
-                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+                    with BS4Parser(html, features=['html5lib', 'permissive'], attr='id="searchResult"') as soup:
                         torrent_table = soup.find('table', attrs={'id': 'searchResult'})
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
 
@@ -187,7 +178,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
                         for tr in torrent_table.find_all('tr')[1:]:
                             try:
                                 seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                                if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                                if self._peers_fail(mode, seeders, leechers):
                                     continue
 
                                 info = tr.find('a', title=rc['info'])
@@ -195,7 +186,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
                                 tid = rc['tid'].sub(r'\1', str(info['href']))
 
                                 download_magnet = tr.find('a', title=rc['get'])['href']
-                            except (AttributeError, TypeError):
+                            except (AttributeError, TypeError, ValueError):
                                 continue
 
                             if self.confirmed and not tr.find('img', title=rc['verify']):
@@ -209,39 +200,41 @@ class ThePirateBayProvider(generic.TorrentProvider):
                                     title = self._find_season_quality(title, tid, ep_number)
 
                             if title and download_magnet:
-                                items[mode].append((title, download_magnet, seeders))
+                                size = None
+                                try:
+                                    size = re.findall('(?i)size[^\d]+(\d+(?:[\.,]\d+)?\W*[bkmgt]\w+)',
+                                                      tr.find_all(class_='detDesc')[0].get_text())[0]
+                                except Exception:
+                                    pass
+
+                                items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))
 
                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
-                self._log_result(mode, len(items[mode]) - cnt, log_url)
+                self._log_search(mode, len(items[mode]) - cnt, log_url)
 
-            # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)
 
-            results += items[mode]
+            results = list(set(results + items[mode]))
 
         if not has_signature:
             logger.log(u'Failed to identify a page from ThePirateBay at %s attempted urls (tpb blocked? general network issue or site dead)' % len(self.urls['config_provider_home_uri']), logger.ERROR)
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date, '')
-
 
 class ThePirateBayCache(tvcache.TVCache):
 
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = ThePirateBayProvider()
sickbeard/providers/tokyotoshokan.py
@@ -20,7 +20,7 @@ import traceback
 import urllib
 
 from . import generic
-from sickbeard import logger, tvcache, show_name_helpers
+from sickbeard import logger, show_name_helpers, tvcache
 from sickbeard.bs4_parser import BS4Parser
 
 
@@ -33,7 +33,7 @@ class TokyoToshokanProvider(generic.TorrentProvider):
 
         self.cache = TokyoToshokanCache(self)
 
-    def _do_search(self, search_string, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_string, search_mode='eponly', **kwargs):
 
         results = []
         if self.show and not self.show.is_anime:
@@ -70,11 +70,11 @@ class TokyoToshokanProvider(generic.TorrentProvider):
 
         return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)
 
-    def _get_season_search_strings(self, ep_obj, **kwargs):
+    def _season_strings(self, ep_obj, **kwargs):
 
         return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
 
-    def _get_episode_search_strings(self, ep_obj, **kwargs):
+    def _episode_strings(self, ep_obj, **kwargs):
 
         return [x.replace('.', ' ') for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
 
@@ -84,9 +84,9 @@ class TokyoToshokanCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 15  # cache update frequency
+        self.update_freq = 15  # cache update frequency
 
-    def _getRSSData(self):
+    def _cache_data(self):
         params = {'filter': '1'}
 
         url = self.provider.url + 'rss.php?' + urllib.urlencode(params)
sickbeard/providers/torrentbytes.py
@@ -16,12 +16,12 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import datetime
 import traceback
 
 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode
 
 
@@ -33,58 +33,44 @@ class TorrentBytesProvider(generic.TorrentProvider):
         self.url_base = 'https://www.torrentbytes.net/'
         self.urls = {'config_provider_home_uri': self.url_base,
                      'login': self.url_base + 'takelogin.php',
-                     'search': self.url_base + 'browse.php?search=%s%s',
+                     'search': self.url_base + 'browse.php?search=%s&%s',
                      'get': self.url_base + '%s'}
 
-        self.categories = '&c41=1&c33=1&c38=1&c32=1&c37=1'
+        self.categories = {'shows': [41, 33, 38, 32, 37]}
 
         self.url = self.urls['config_provider_home_uri']
 
         self.username, self.password, self.minseed, self.minleech = 4 * [None]
+        self.freeleech = False
         self.cache = TorrentBytesCache(self)
 
-    def _do_login(self):
+    def _authorised(self, **kwargs):
 
-        logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
-        if logged_in():
-            return True
+        return super(TorrentBytesProvider, self)._authorised(post_params={'login': 'Log in!'})
 
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password, 'login': 'Log in!'}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate with %s, abort provider'
-            if response and 'Username or password incorrect' in response:
-                msg = u'Invalid username or password for %s. Check settings'
-            logger.log(msg % self.name, logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         results = []
-        if not self._do_login():
+        if not self._authorised():
            return results
 
-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
-        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download', 'fl': '\[\W*F\W?L\W*\]'
+                                                             }.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['search'] % (search_string, self.categories)
-                html = self.get_url(search_url)
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['search'] % (search_string, self._categories_string())
+
+                html = self.get_url(search_url, timeout=90)
 
                 cnt = len(items[mode])
                 try:
                     if not html or self._has_no_results(html):
                         raise generic.HaltParseException
 
-                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+                    with BS4Parser(html, features=['html5lib', 'permissive'], attr='border="1"') as soup:
                         torrent_table = soup.find('table', attrs={'border': '1'})
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
 
@@ -93,53 +79,46 @@ class TorrentBytesProvider(generic.TorrentProvider):
 
                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                                if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                                info = tr.find('a', href=rc['info'])
+                                seeders, leechers, size = [tryInt(n, n) for n in [
+                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                if self.freeleech and (len(info.contents) < 2 or not rc['fl'].search(info.contents[1].string.strip())) \
+                                        or self._peers_fail(mode, seeders, leechers):
                                     continue
 
-                                info = tr.find('a', href=rc['info'])
-                                title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
+                                title = 'title' in info.attrs and info.attrs['title'] or info.contents[0]
+                                title = (isinstance(title, list) and title[0] or title).strip()
 
                                 download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                            except (AttributeError, TypeError):
+                            except (AttributeError, TypeError, ValueError):
                                 continue
 
                             if title and download_url:
-                                items[mode].append((title, download_url, seeders))
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)
 
-            results += items[mode]
+            results = list(set(results + items[mode]))
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)
-
 
 class TorrentBytesCache(tvcache.TVCache):
 
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 20  # cache update frequency
+        self.update_freq = 20  # cache update frequency
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = TorrentBytesProvider()
sickbeard/providers/torrentday.py
@@ -16,11 +16,11 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import datetime
 import time
 
 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import tvcache
+from sickbeard.helpers import (has_anime, tryInt)
 
 
 class TorrentDayProvider(generic.TorrentProvider):
@@ -34,90 +34,82 @@ class TorrentDayProvider(generic.TorrentProvider):
                      'search': self.url_base + 'V3/API/API.php',
                      'get': self.url_base + 'download.php/%s/%s'}
 
-        self.categories = {'Season': {'c14': 1},
-                           'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1},
-                           'Cache': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}}
+        self.categories = {'Season': {'c31': 1, 'c33': 1, 'c14': 1},
+                           'Episode': {'c32': 1, 'c26': 1, 'c7': 1, 'c2': 1},
+                           'Cache': {'c31': 1, 'c33': 1, 'c14': 1, 'c32': 1, 'c26': 1, 'c7': 1, 'c2': 1}}
 
+        self.proper_search_terms = None
         self.url = self.urls['config_provider_home_uri']
 
         self.username, self.password, self.minseed, self.minleech = 4 * [None]
         self.freeleech = False
         self.cache = TorrentDayCache(self)
 
-    def _do_login(self):
+    def _authorised(self, **kwargs):
 
-        logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
-        if logged_in():
-            return True
+        return super(TorrentDayProvider, self)._authorised(
+            post_params={'submit.x': 0, 'submit.y': 0},
+            failed_msg=(lambda x=None: re.search(r'(?i)tried((<[^>]+>)|\W)*too((<[^>]+>)|\W)*often', x) and
+                        u'Abort %s, Too many login attempts. Settings must be checked' or (
+                            re.search(r'(?i)username((<[^>]+>)|\W)*or((<[^>]+>)|\W)*password', x) and
+                            u'Invalid username or password for %s. Check settings' or
+                            u'Failed to authenticate with %s, abort provider')))
 
-        if self._check_auth():
-            login_params = {'username': self.username, 'password': self.password, 'submit.x': 0, 'submit.y': 0}
-            response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
-            if response and logged_in():
-                return True
-
-            msg = u'Failed to authenticate'
-            if response and 'tried too often' in response:
-                msg = u'Too many login attempts'
-            logger.log(u'%s, abort provider %s' % (msg, self.name), logger.ERROR)
-
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         results = []
-        if not self._do_login():
+        if not self._authorised():
            return results
 
-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 search_string = '+'.join(search_string.split())
                 post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 8, 'jxw': 'b', 'search': search_string},
-                                 **self.categories[mode])
+                                 **self.categories[(mode, 'Episode')['Propers' == mode]])
+                if ('Cache' == mode and has_anime()) or (
+                        mode in ['Season', 'Episode'] and self.show and self.show.is_anime):
+                    post_data.update({'c29': 1})
 
                 if self.freeleech:
                     post_data.update({'free': 'on'})
 
                 data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
 
                 cnt = len(items[mode])
                 try:
                     if not data_json:
                         raise generic.HaltParseException
-                    torrents = data_json.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
+                    torrents = data_json.get('Fs')[0].get('Cn').get('torrents')
 
-                    for torrent in torrents:
-                        seeders, leechers = int(torrent['seed']), int(torrent['leech'])
-                        if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
+                    for item in torrents:
+                        seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seed', 'leech', 'size']]
+                        if self._peers_fail(mode, seeders, leechers):
                             continue
 
-                        title = re.sub(r'\[.*=.*\].*\[/.*\]', '', torrent['name'])
+                        title = re.sub(r'\[.*=.*\].*\[/.*\]', '', item['name'])
 
-                        download_url = self.urls['get'] % (torrent['id'], torrent['fname'])
+                        download_url = self.urls['get'] % (item['id'], item['fname'])
 
                         if title and download_url:
-                            items[mode].append((title, download_url, seeders))
+                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
                 except Exception:
                     time.sleep(1.1)
 
-                self._log_result(mode, len(items[mode]) - cnt,
+                self._log_search(mode, len(items[mode]) - cnt,
                                  ('search string: ' + search_string, self.name)['Cache' == mode])
 
-            # For each search mode sort all the items by seeders
-            items[mode].sort(key=lambda tup: tup[2], reverse=True)
+            self._sort_seeders(mode, items)
 
-            results += items[mode]
+            results = list(set(results + items[mode]))
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
+    def _episode_strings(self, ep_obj, **kwargs):
 
-        return self._find_propers(search_date, '')
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='.')
+        return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', date_or=True, **kwargs)
 
 
 class TorrentDayCache(tvcache.TVCache):
@@ -125,9 +117,9 @@ class TorrentDayCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-    def _getRSSData(self):
+    def _cache_data(self):
 
-        return self.provider.get_cache_data()
+        return self.provider.cache_data()
 
 
 provider = TorrentDayProvider()
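Note on the `tryInt` unpacking idiom used in the TorrentDay and SpeedCD hunks above; the helper's source is not part of this diff, so the behaviour shown here is an assumption (numeric strings become ints, everything else falls back to the supplied default):

    # assumed behaviour of sickbeard.helpers.tryInt (illustrative, not the project source)
    def tryInt(s, s_default=0):
        try:
            return int(s)
        except (TypeError, ValueError):
            return s_default

    item = {'seed': '42', 'leech': None, 'size': '1.4 GB'}
    seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in ('seed', 'leech', 'size')]]
    # -> 42, None, '1.4 GB': each value is converted when possible, otherwise returned unchanged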
sickbeard/providers/torrenting.py
@@ -16,12 +16,12 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-import datetime
 import traceback
 
 from . import generic
-from sickbeard import logger, tvcache, helpers
+from sickbeard import logger, tvcache
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode
 
 
@@ -34,53 +34,33 @@ class TorrentingProvider(generic.TorrentProvider):
 
         self.api = 'https://ttonline.us/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login_test': self.api + 'rss.php',
-                     'search': self.api + 'browse.php?%ssearch=%s',
+                     'login': self.api + 'secure.php',
+                     'search': self.api + 'browse.php?%s&search=%s',
                      'get': self.api + '%s'}
 
-        self.categories = 'c4=1&c5=1&'
+        self.categories = {'shows': [4, 5]}
 
         self.url = self.urls['config_provider_home_uri']
 
-        self.digest, self.minseed, self.minleech = 3 * [None]
+        self.username, self.password, self.minseed, self.minleech = 4 * [None]
         self.cache = TorrentingCache(self)
 
-    def _do_login(self):
-
-        logged_in = lambda: 'uid' in self.session.cookies and self.session.cookies['uid'] in self.digest and \
-                            'pass' in self.session.cookies and self.session.cookies['pass'] in self.digest
-        if logged_in():
-            return True
-
-        self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', self.digest)
-        success, msg = self._check_cookie()
-        if not success:
-            logger.log(u'%s: [%s]' % (msg, self.cookies), logger.WARNING)
-        else:
-            response = helpers.getURL(self.urls['login_test'], session=self.session)
-            if response and logged_in() and 'Generate RSS' in response[8550:]:
-                return True
-            logger.log(u'Invalid cookie details for %s. Check settings' % self.name, logger.ERROR)
-
-        self.cookies = None
-        return False
-
-    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
+    def _search_provider(self, search_params, **kwargs):
 
         results = []
-        if not self._do_login():
+        if not self._authorised():
            return results
 
-        items = {'Season': [], 'Episode': [], 'Cache': []}
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download',
-                                                             'cats': 'cat=(?:4|5)'}.items())
+                                                             'cats': 'cat=(?:%s)' % self._categories_string(template='', delimiter='|')
+                                                             }.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
-                if isinstance(search_string, unicode):
-                    search_string = unidecode(search_string)
-
-                search_url = self.urls['search'] % (self.categories, search_string)
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+                search_url = self.urls['search'] % (self._categories_string(), search_string)
 
                 html = self.get_url(search_url)
 
                 cnt = len(items[mode])
@@ -97,57 +77,42 @@ class TorrentingProvider(generic.TorrentProvider):
 
                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
-                                if None is tr.find('a', href=rc['cats'])\
-                                        or ('Cache' != mode and (seeders < self.minseed or leechers < self.minleech)):
+                                seeders, leechers, size = [tryInt(n, n) for n in [
+                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+                                if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
                                     continue
 
                                 info = tr.find('a', href=rc['info'])
                                 title = 'title' in info.attrs and info.attrs['title'] or info.get_text().strip()
                                 download_url = self.urls['get'] % tr.find('a', href=rc['get']).get('href')
-                            except (AttributeError, TypeError):
+                            except (AttributeError, TypeError, ValueError):
                                 continue
 
                             if title and download_url:
-                                items[mode].append((title, download_url, seeders))
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
                 except generic.HaltParseException:
                     pass
                 except Exception:
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
-                self._log_result(mode, len(items[mode]) - cnt, search_url)
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            results += items[mode]
+            self._sort_seeders(mode, items)
+
+            results = list(set(results + items[mode]))
 
         return results
 
-    def find_propers(self, search_date=datetime.datetime.today()):
-
-        return self._find_propers(search_date)
-
-    def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
-
-        return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, use_or=False)
-
-    @staticmethod
-    def ui_string(key):
-        result = ''
-        if 'torrenting_digest' == key:
-            result = 'use... \'uid=xx; pass=yy\''
-        return result
-
 
 class TorrentingCache(tvcache.TVCache):
 
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.minTime = 7  # cache update frequency
-
-    def _getRSSData(self):
-
-        return self.provider.get_cache_data()
+    def _cache_data(self):
+
+        return self.provider.cache_data()
 
 
 provider = TorrentingProvider()
@ -16,69 +16,51 @@
|
||||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import datetime
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from . import generic
|
from . import generic
|
||||||
from sickbeard import logger, tvcache, helpers
|
from sickbeard import logger, tvcache
|
||||||
from sickbeard.bs4_parser import BS4Parser
|
from sickbeard.bs4_parser import BS4Parser
|
||||||
from lib.unidecode import unidecode
|
from lib.unidecode import unidecode
|
||||||
|
|
||||||
|
|
||||||
class TorrentLeechProvider(generic.TorrentProvider):
|
class TorrentLeechProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
generic.TorrentProvider.__init__(self, 'TorrentLeech')
|
generic.TorrentProvider.__init__(self, 'TorrentLeech')
|
||||||
|
|
||||||
self.url_base = 'https://torrentleech.org/'
|
self.url_base = 'https://torrentleech.org/'
|
||||||
self.urls = {'config_provider_home_uri': self.url_base,
|
self.urls = {'config_provider_home_uri': self.url_base,
|
||||||
'login': self.url_base + 'user/account/login/',
|
'login': self.url_base + 'user/account/login/',
|
||||||
'search': self.url_base + 'torrents/browse/index/query/%s/categories/%s',
|
'browse': self.url_base + 'torrents/browse/index/categories/%(cats)s',
|
||||||
'cache': self.url_base + 'torrents/browse/index/categories/%s',
|
'search': self.url_base + 'torrents/browse/index/query/%(query)s/categories/%(cats)s',
|
||||||
'get': self.url_base + '%s'}
|
'get': self.url_base + '%s'}
|
||||||
|
|
||||||
self.categories = '2,26,27,32'
|
self.categories = {'shows': [2, 26, 27, 32], 'anime': [7, 34, 35]}
|
||||||
|
|
||||||
self.url = self.urls['config_provider_home_uri']
|
self.url = self.urls['config_provider_home_uri']
|
||||||
|
|
||||||
self.username, self.password, self.minseed, self.minleech = 4 * [None]
|
self.username, self.password, self.minseed, self.minleech = 4 * [None]
|
||||||
self.cache = TorrentLeechCache(self)
|
self.cache = TorrentLeechCache(self)
|
||||||
|
|
||||||
def _do_login(self):
|
def _authorised(self, **kwargs):
|
||||||
|
|
||||||
logged_in = lambda: 'tluid' in self.session.cookies and 'tlpass' in self.session.cookies
|
return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='tl')),
|
||||||
if logged_in():
|
post_params={'remember_me': 'on', 'login': 'submit'})
|
||||||
return True
|
|
||||||
|
|
||||||
if self._check_auth():
|
def _search_provider(self, search_params, **kwargs):
|
||||||
login_params = {'username': self.username, 'password': self.password, 'remember_me': 'on', 'login': 'submit'}
|
|
||||||
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
|
|
||||||
if response and logged_in():
|
|
||||||
return True
|
|
||||||
|
|
||||||
logger.log(u'Failed to authenticate with %s, abort provider.' % self.name, logger.ERROR)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
|
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
if not self._do_login():
|
if not self._authorised():
|
||||||
return results
|
return results
|
||||||
|
|
||||||
items = {'Season': [], 'Episode': [], 'Cache': []}
|
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
|
||||||
|
|
||||||
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'download'}.items())
|
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'download'}.items())
|
||||||
for mode in search_params.keys():
|
for mode in search_params.keys():
|
||||||
for search_string in search_params[mode]:
|
for search_string in search_params[mode]:
|
||||||
|
search_url = self.urls[('search', 'browse')['Cache' == mode]] % {
|
||||||
if isinstance(search_string, unicode):
|
'cats': self._categories_string(mode, '', ','),
|
||||||
search_string = unidecode(search_string)
|
'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string}
|
||||||
|
|
||||||
if 'Cache' == mode:
|
|
||||||
search_url = self.urls['cache'] % self.categories
|
|
||||||
else:
|
|
||||||
search_url = self.urls['search'] % (search_string, self.categories)
|
|
||||||
|
|
||||||
html = self.get_url(search_url)
|
html = self.get_url(search_url)
|
||||||
|
|
||||||
|
@ -98,49 +80,45 @@ class TorrentLeechProvider(generic.TorrentProvider):
|
||||||
try:
|
try:
|
||||||
seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
|
seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
|
||||||
for x in ('seeders', 'leechers')]
|
for x in ('seeders', 'leechers')]
|
||||||
if mode != 'Cache' and (seeders < self.minseed or leechers < self.minleech):
|
if self._peers_fail(mode, seeders, leechers):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
info = tr.find('td', {'class': 'name'}).a
|
info = tr.find('td', {'class': 'name'}).a
|
||||||
title = ('title' in info.attrs and info['title']) or info.get_text().strip()
|
title = ('title' in info.attrs and info['title']) or info.get_text().strip()
|
||||||
|
size = tr.find_all('td')[-5].get_text().strip()
|
||||||
|
|
||||||
download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
|
download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
|
||||||
except (AttributeError, TypeError):
|
except (AttributeError, TypeError, ValueError):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if title and download_url:
|
if title and download_url:
|
||||||
items[mode].append((title, download_url, seeders))
|
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
|
||||||
|
|
||||||
except generic.HaltParseException:
|
except generic.HaltParseException:
|
||||||
pass
|
pass
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
|
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
|
||||||
self._log_result(mode, len(items[mode]) - cnt, search_url)
|
self._log_search(mode, len(items[mode]) - cnt, search_url)
|
||||||
|
|
||||||
items[mode].sort(key=lambda tup: tup[2], reverse=True)
|
self._sort_seeders(mode, items)
|
||||||
|
|
||||||
results += items[mode]
|
results = list(set(results + items[mode]))
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def find_propers(self, search_date=datetime.datetime.today()):
|
def _episode_strings(self, ep_obj, **kwargs):
|
||||||
|
|
||||||
return self._find_propers(search_date)
|
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
|
||||||
|
|
||||||
def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
|
|
||||||
|
|
||||||
return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='|', use_or=False)
|
|
||||||
|
|
||||||
|
|
||||||
class TorrentLeechCache(tvcache.TVCache):
|
class TorrentLeechCache(tvcache.TVCache):
|
||||||
|
|
||||||
def __init__(self, this_provider):
|
def __init__(self, this_provider):
|
||||||
tvcache.TVCache.__init__(self, this_provider)
|
tvcache.TVCache.__init__(self, this_provider)
|
||||||
|
|
||||||
self.minTime = 20 # cache update frequency
|
self.update_freq = 20 # cache update frequency
|
||||||
|
|
||||||
def _getRSSData(self):
|
def _cache_data(self):
|
||||||
|
return self.provider.cache_data()
|
||||||
|
|
||||||
return self.provider.get_cache_data()
|
|
||||||
|
|
||||||
provider = TorrentLeechProvider()
|
provider = TorrentLeechProvider()
|
||||||
|
|
|
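Note: the cache classes in these provider diffs all follow the same rename, `minTime` to `update_freq` and `_getRSSData` to `_cache_data`, which simply delegates to the provider's `cache_data()`. A minimal stand-alone sketch of that delegation; the classes below are illustrative stand-ins, not the real tvcache or provider modules.

class DemoCache(object):
    def __init__(self, this_provider):
        self.provider = this_provider
        self.update_freq = 20  # minutes between cache refreshes

    def _cache_data(self):
        # delegate data fetching back to the provider
        return self.provider.cache_data()


class DemoProvider(object):
    def cache_data(self):
        # a real provider would run its 'Cache' search here
        return [('Example.Show.S01E01.720p', 'https://example.tld/get/1', 5, 1048576)]


print(DemoCache(DemoProvider())._cache_data())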
@ -18,12 +18,12 @@
|
||||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import datetime
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from . import generic
|
from . import generic
|
||||||
from sickbeard import logger, tvcache, helpers
|
from sickbeard import logger, tvcache
|
||||||
from sickbeard.bs4_parser import BS4Parser
|
from sickbeard.bs4_parser import BS4Parser
|
||||||
|
from sickbeard.helpers import tryInt
|
||||||
from lib.unidecode import unidecode
|
from lib.unidecode import unidecode
|
||||||
|
|
||||||
|
|
||||||
|
@ -35,54 +35,38 @@ class TorrentShackProvider(generic.TorrentProvider):
|
||||||
self.url_base = 'https://torrentshack.me/'
|
self.url_base = 'https://torrentshack.me/'
|
||||||
self.urls = {'config_provider_home_uri': self.url_base,
|
self.urls = {'config_provider_home_uri': self.url_base,
|
||||||
'login': self.url_base + 'login.php?lang=',
|
'login': self.url_base + 'login.php?lang=',
|
||||||
'search': self.url_base + 'torrents.php?searchstr=%s'
|
'search': self.url_base + 'torrents.php?searchstr=%s&%s&' + '&'.join(
|
||||||
+ '&release_type=both&searchtags=&tags_type=0&order_by=s3&order_way=desc&torrent_preset=all'
|
['release_type=both', 'searchtags=', 'tags_type=0', 'order_by=s3', 'order_way=desc', 'torrent_preset=all']),
|
||||||
+ '&filter_cat[600]=1&filter_cat[620]=1&filter_cat[700]=1'
|
|
||||||
+ '&filter_cat[850]=1&filter_cat[980]=1&filter_cat[981]=1',
|
|
||||||
'get': self.url_base + '%s'}
|
'get': self.url_base + '%s'}
|
||||||
|
|
||||||
|
self.categories = {'shows': [600, 620, 700, 981, 980], 'anime': [850]}
|
||||||
|
|
||||||
self.url = self.urls['config_provider_home_uri']
|
self.url = self.urls['config_provider_home_uri']
|
||||||
|
|
||||||
self.username, self.password, self.minseed, self.minleech = 4 * [None]
|
self.username, self.password, self.minseed, self.minleech = 4 * [None]
|
||||||
self.cache = TorrentShackCache(self)
|
self.cache = TorrentShackCache(self)
|
||||||
|
|
||||||
def _do_login(self):
|
def _authorised(self, **kwargs):
|
||||||
|
|
||||||
logged_in = lambda: 'session' in self.session.cookies
|
return super(TorrentShackProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
|
||||||
if logged_in():
|
post_params={'keeplogged': '1', 'login': 'Login'})
|
||||||
return True
|
|
||||||
|
|
||||||
if self._check_auth():
|
def _search_provider(self, search_params, **kwargs):
|
||||||
login_params = {'username': self.username, 'password': self.password, 'keeplogged': '1', 'login': 'Login'}
|
|
||||||
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
|
|
||||||
if response and logged_in():
|
|
||||||
return True
|
|
||||||
|
|
||||||
msg = u'Failed to authenticate with %s, abort provider'
|
|
||||||
if response and 'username or password was incorrect' in response:
|
|
||||||
msg = u'Invalid username or password for %s. Check settings'
|
|
||||||
logger.log(msg % self.name, logger.ERROR)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
|
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
if not self._do_login():
|
if not self._authorised():
|
||||||
return results
|
return results
|
||||||
|
|
||||||
items = {'Season': [], 'Episode': [], 'Cache': []}
|
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
|
||||||
|
|
||||||
rc = dict((k, re.compile('(?i)' + v))
|
rc = dict((k, re.compile('(?i)' + v))
|
||||||
for (k, v) in {'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+'}.items())
|
for (k, v) in {'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+'}.items())
|
||||||
for mode in search_params.keys():
|
for mode in search_params.keys():
|
||||||
for search_string in search_params[mode]:
|
for search_string in search_params[mode]:
|
||||||
|
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
|
||||||
if isinstance(search_string, unicode):
|
|
||||||
search_string = unidecode(search_string)
|
|
||||||
|
|
||||||
# fetch 15 results by default, and up to 100 if allowed in user profile
|
# fetch 15 results by default, and up to 100 if allowed in user profile
|
||||||
search_url = self.urls['search'] % search_string
|
search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1'))
|
||||||
|
|
||||||
html = self.get_url(search_url)
|
html = self.get_url(search_url)
|
||||||
|
|
||||||
cnt = len(items[mode])
|
cnt = len(items[mode])
|
||||||
|
@ -99,8 +83,9 @@ class TorrentShackProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
for tr in torrent_rows[1:]:
|
for tr in torrent_rows[1:]:
|
||||||
try:
|
try:
|
||||||
seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
|
seeders, leechers, size = [tryInt(n, n) for n in [
|
||||||
if 'Cache' != mode and (seeders < self.minseed or leechers < self.minleech):
|
tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
|
||||||
|
if self._peers_fail(mode, seeders, leechers):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
info = tr.find('a', title=rc['info'])
|
info = tr.find('a', title=rc['info'])
|
||||||
|
@ -109,32 +94,27 @@ class TorrentShackProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
link = str(tr.find('a', title=rc['get'])['href']).replace('&', '&').lstrip('/')
|
link = str(tr.find('a', title=rc['get'])['href']).replace('&', '&').lstrip('/')
|
||||||
download_url = self.urls['get'] % link
|
download_url = self.urls['get'] % link
|
||||||
except (AttributeError, TypeError):
|
except (AttributeError, TypeError, ValueError):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if title and download_url:
|
if title and download_url:
|
||||||
items[mode].append((title, download_url, seeders))
|
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
|
||||||
|
|
||||||
except generic.HaltParseException:
|
except generic.HaltParseException:
|
||||||
pass
|
pass
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
|
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
|
||||||
self._log_result(mode, len(items[mode]) - cnt, search_url)
|
self._log_search(mode, len(items[mode]) - cnt, search_url)
|
||||||
|
|
||||||
# for each search mode sort all the items by seeders
|
self._sort_seeders(mode, items)
|
||||||
items[mode].sort(key=lambda tup: tup[2], reverse=True)
|
|
||||||
|
|
||||||
results += items[mode]
|
results = list(set(results + items[mode]))
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def find_propers(self, search_date=datetime.datetime.today()):
|
def _episode_strings(self, ep_obj, **kwargs):
|
||||||
|
|
||||||
return self._find_propers(search_date)
|
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)
|
||||||
|
|
||||||
def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
|
|
||||||
|
|
||||||
return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, sep_date='.', use_or=False)
|
|
||||||
|
|
||||||
|
|
||||||
class TorrentShackCache(tvcache.TVCache):
|
class TorrentShackCache(tvcache.TVCache):
|
||||||
|
@ -142,10 +122,11 @@ class TorrentShackCache(tvcache.TVCache):
|
||||||
def __init__(self, this_provider):
|
def __init__(self, this_provider):
|
||||||
tvcache.TVCache.__init__(self, this_provider)
|
tvcache.TVCache.__init__(self, this_provider)
|
||||||
|
|
||||||
self.minTime = 20 # cache update frequency
|
self.update_freq = 20 # cache update frequency
|
||||||
|
|
||||||
def _getRSSData(self):
|
def _cache_data(self):
|
||||||
|
|
||||||
|
return self.provider.cache_data()
|
||||||
|
|
||||||
return self.provider.get_cache_data()
|
|
||||||
|
|
||||||
provider = TorrentShackProvider()
|
provider = TorrentShackProvider()
|
||||||
|
|
|
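Note: the refactor parses seeders, leechers and size in one comprehension via `tryInt(n, n)`, keeping the raw text as a fallback so `_bytesizer` can still read a human size later. A sketch with a stand-in `try_int` (assumed behaviour: `int(value)` or the supplied default on failure):

def try_int(value, default=0):
    try:
        return int(value)
    except (TypeError, ValueError):
        return default


cells = ['12', '3', '1.4 GB']  # text of the seeders, leechers and size table cells
seeders, leechers, size = [try_int(n, n) for n in cells]
print(seeders, leechers, size)  # -> 12 3 1.4 GB (size stays text for _bytesizer)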
@ -16,12 +16,11 @@
|
||||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import datetime
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from . import generic
|
from . import generic
|
||||||
from sickbeard import logger, tvcache, helpers
|
from sickbeard import helpers, logger, tvcache
|
||||||
from sickbeard.bs4_parser import BS4Parser
|
from sickbeard.helpers import tryInt
|
||||||
from lib.unidecode import unidecode
|
from lib.unidecode import unidecode
|
||||||
|
|
||||||
|
|
||||||
|
@ -32,111 +31,92 @@ class TransmithenetProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
self.url_base = 'https://transmithe.net/'
|
self.url_base = 'https://transmithe.net/'
|
||||||
self.urls = {'config_provider_home_uri': self.url_base,
|
self.urls = {'config_provider_home_uri': self.url_base,
|
||||||
'login': self.url_base + 'index.php?page=login',
|
'login_action': self.url_base + 'login.php',
|
||||||
'cache': self.url_base + 'index.php?page=torrents&options=0&active=1',
|
'user': self.url_base + 'ajax.php?action=index',
|
||||||
'search': '&search=%s',
|
'browse': self.url_base + 'ajax.php?action=browse&auth=%s&passkey=%s',
|
||||||
'get': self.url_base + '%s'}
|
'search': '&searchstr=%s',
|
||||||
|
'get': self.url_base + 'torrents.php?action=download&authkey=%s&torrent_pass=%s&id=%s'}
|
||||||
|
|
||||||
self.url = self.urls['config_provider_home_uri']
|
self.url = self.urls['config_provider_home_uri']
|
||||||
|
self.user_authkey, self.user_passkey = 2 * [None]
|
||||||
|
|
||||||
self.username, self.password, self.minseed, self.minleech = 4 * [None]
|
self.username, self.password, self.minseed, self.minleech = 4 * [None]
|
||||||
|
self.freeleech = False
|
||||||
self.cache = TransmithenetCache(self)
|
self.cache = TransmithenetCache(self)
|
||||||
|
|
||||||
def _do_login(self):
|
def _authorised(self, **kwargs):
|
||||||
|
|
||||||
logged_in = lambda: 'uid' in self.session.cookies and 'pass' in self.session.cookies
|
if not super(TransmithenetProvider, self)._authorised(
|
||||||
if logged_in():
|
logged_in=(lambda x=None: self.has_all_cookies('session')),
|
||||||
return True
|
post_params={'keeplogged': '1', 'login': 'Login'}):
|
||||||
|
return False
|
||||||
|
if not self.user_authkey:
|
||||||
|
response = helpers.getURL(self.urls['user'], session=self.session, json=True)
|
||||||
|
if 'response' in response:
|
||||||
|
self.user_authkey, self.user_passkey = [response['response'].get(v) for v in 'authkey', 'passkey']
|
||||||
|
return self.user_authkey
|
||||||
|
|
||||||
if self._check_auth():
|
def _search_provider(self, search_params, **kwargs):
|
||||||
login_params = {'uid': self.username, 'pwd': self.password, 'remember_me': 'on', 'login': 'submit'}
|
|
||||||
response = helpers.getURL(self.urls['login'], post_data=login_params, session=self.session)
|
|
||||||
if response and logged_in():
|
|
||||||
return True
|
|
||||||
|
|
||||||
logger.log(u'Failed to authenticate with %s, abort provider.' % self.name, logger.ERROR)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):
|
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
if not self._do_login():
|
if not self._authorised():
|
||||||
return results
|
return results
|
||||||
|
|
||||||
items = {'Season': [], 'Episode': [], 'Cache': []}
|
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
|
||||||
|
|
||||||
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'torrent-details', 'get': 'download',
|
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'nodots': '[\.\s]+'}.items())
|
||||||
'peers': 'page=peers', 'nodots': '[\.\s]+'}.items())
|
|
||||||
for mode in search_params.keys():
|
for mode in search_params.keys():
|
||||||
for search_string in search_params[mode]:
|
for search_string in search_params[mode]:
|
||||||
|
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
|
||||||
|
|
||||||
if isinstance(search_string, unicode):
|
search_url = self.urls['browse'] % (self.user_authkey, self.user_passkey)
|
||||||
search_string = unidecode(search_string)
|
|
||||||
|
|
||||||
search_url = self.urls['cache']
|
|
||||||
if 'Cache' != mode:
|
if 'Cache' != mode:
|
||||||
search_url += self.urls['search'] % rc['nodots'].sub(' ', search_string)
|
search_url += self.urls['search'] % rc['nodots'].sub('+', search_string)
|
||||||
|
|
||||||
html = self.get_url(search_url)
|
data_json = self.get_url(search_url, json=True)
|
||||||
|
|
||||||
cnt = len(items[mode])
|
cnt = len(items[mode])
|
||||||
try:
|
try:
|
||||||
if not html or self._has_no_results(html):
|
for item in data_json['response']['results']:
|
||||||
raise generic.HaltParseException
|
if self.freeleech and not item.get('isFreeleech'):
|
||||||
|
continue
|
||||||
|
|
||||||
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
|
seeders, leechers, group_name, torrent_id, size = [tryInt(n, n) for n in [item.get(x) for x in [
|
||||||
torrent_table = soup.find_all('table', 'lista')[-1]
|
'seeders', 'leechers', 'groupName', 'torrentId', 'size']]]
|
||||||
torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
|
if self._peers_fail(mode, seeders, leechers):
|
||||||
|
continue
|
||||||
|
|
||||||
if 2 > len(torrent_rows):
|
try:
|
||||||
raise generic.HaltParseException
|
title_parts = group_name.split('[')
|
||||||
|
maybe_res = re.findall('((?:72|108)0\w)', title_parts[1])
|
||||||
|
detail = title_parts[1].split('/')
|
||||||
|
detail[1] = detail[1].strip().lower().replace('mkv', 'x264')
|
||||||
|
title = '%s.%s' % (title_parts[0].strip(), '.'.join(
|
||||||
|
(len(maybe_res) and [maybe_res[0]] or []) + [detail[0].strip(), detail[1]]))
|
||||||
|
except (IndexError, KeyError):
|
||||||
|
title = group_name
|
||||||
|
download_url = self.urls['get'] % (self.user_authkey, self.user_passkey, torrent_id)
|
||||||
|
|
||||||
for tr in torrent_rows[1:]:
|
if title and download_url:
|
||||||
if tr.find('td', class_='header'):
|
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
|
||||||
continue
|
|
||||||
downlink = tr.find('a', href=rc['get'])
|
|
||||||
if None is downlink:
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
seeders, leechers = [int(x.get_text().strip()) for x in tr.find_all('a', href=rc['peers'])]
|
|
||||||
if mode != 'Cache' and (seeders < self.minseed or leechers < self.minleech):
|
|
||||||
continue
|
|
||||||
|
|
||||||
info = tr.find('a', href=rc['info'])
|
|
||||||
title = ('data-src' in info.attrs and info['data-src']) or\
|
|
||||||
('title' in info.attrs and info['title']) or info.get_text().strip()
|
|
||||||
|
|
||||||
download_url = self.urls['get'] % str(downlink['href']).lstrip('/')
|
|
||||||
except (AttributeError, TypeError):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if title and download_url:
|
|
||||||
items[mode].append((title, download_url, seeders))
|
|
||||||
|
|
||||||
except generic.HaltParseException:
|
|
||||||
pass
|
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
|
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
|
||||||
self._log_result(mode, len(items[mode]) - cnt, search_url)
|
self._log_search(mode, len(items[mode]) - cnt, search_url)
|
||||||
|
|
||||||
items[mode].sort(key=lambda tup: tup[2], reverse=True)
|
self._sort_seeders(mode, items)
|
||||||
|
|
||||||
results += items[mode]
|
results = list(set(results + items[mode]))
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def find_propers(self, search_date=datetime.datetime.today()):
|
def _season_strings(self, ep_obj, **kwargs):
|
||||||
|
|
||||||
return self._find_propers(search_date)
|
return generic.TorrentProvider._season_strings(self, ep_obj, scene=False)
|
||||||
|
|
||||||
def _get_season_search_strings(self, ep_obj, **kwargs):
|
def _episode_strings(self, ep_obj, **kwargs):
|
||||||
|
|
||||||
return generic.TorrentProvider._get_season_search_strings(self, ep_obj, scene=False)
|
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
|
||||||
|
|
||||||
def _get_episode_search_strings(self, ep_obj, add_string='', **kwargs):
|
|
||||||
|
|
||||||
return generic.TorrentProvider._get_episode_search_strings(self, ep_obj, add_string, scene=False, use_or=False)
|
|
||||||
|
|
||||||
|
|
||||||
class TransmithenetCache(tvcache.TVCache):
|
class TransmithenetCache(tvcache.TVCache):
|
||||||
|
@ -144,10 +124,11 @@ class TransmithenetCache(tvcache.TVCache):
|
||||||
def __init__(self, this_provider):
|
def __init__(self, this_provider):
|
||||||
tvcache.TVCache.__init__(self, this_provider)
|
tvcache.TVCache.__init__(self, this_provider)
|
||||||
|
|
||||||
self.minTime = 17 # cache update frequency
|
self.update_freq = 17
|
||||||
|
|
||||||
def _getRSSData(self):
|
def _cache_data(self):
|
||||||
|
|
||||||
|
return self.provider.cache_data()
|
||||||
|
|
||||||
return self.provider.get_cache_data()
|
|
||||||
|
|
||||||
provider = TransmithenetProvider()
|
provider = TransmithenetProvider()
|
||||||
|
|
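Note: Transmithe.net now reads the JSON browse endpoint instead of scraping HTML. The response shape below is assumed from the fields the new code reads (`groupName`, `torrentId`, `seeders`, `leechers`, `size`, `isFreeleech`); the sample values are invented and the title handling is simplified (no resolution sniffing).

sample = {'response': {'results': [
    {'groupName': 'Example Show [720p / MKV]', 'torrentId': 101,
     'seeders': 8, 'leechers': 1, 'size': 734003200, 'isFreeleech': False}]}}

for item in sample['response']['results']:
    title_parts = item.get('groupName').split('[')
    detail = title_parts[1].rstrip(']').split('/')
    detail[1] = detail[1].strip().lower().replace('mkv', 'x264')
    title = '%s.%s' % (title_parts[0].strip(), '.'.join([detail[0].strip(), detail[1]]))
    print(title, item.get('seeders'), item.get('leechers'))  # -> Example Show.720p.x264 8 1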
189
sickbeard/providers/tvchaosuk.py (new file)
|
@ -0,0 +1,189 @@
|
||||||
|
# coding=utf-8
|
||||||
|
#
|
||||||
|
# This file is part of SickGear.
|
||||||
|
#
|
||||||
|
# SickGear is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# SickGear is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from . import generic
|
||||||
|
from sickbeard import logger, tvcache
|
||||||
|
from sickbeard.bs4_parser import BS4Parser
|
||||||
|
from sickbeard.helpers import tryInt
|
||||||
|
from sickbeard.config import naming_ep_type
|
||||||
|
from dateutil.parser import parse
|
||||||
|
from lib.unidecode import unidecode
|
||||||
|
|
||||||
|
|
||||||
|
class TVChaosUKProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
generic.TorrentProvider.__init__(self, 'TVChaosUK')
|
||||||
|
|
||||||
|
self.url_base = 'https://tvchaosuk.com/'
|
||||||
|
self.urls = {'config_provider_home_uri': self.url_base,
|
||||||
|
'login': self.url_base + 'takelogin.php',
|
||||||
|
'search': self.url_base + 'browse.php',
|
||||||
|
'get': self.url_base + '%s'}
|
||||||
|
|
||||||
|
self.url = self.urls['config_provider_home_uri']
|
||||||
|
|
||||||
|
self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
|
||||||
|
self.search_fallback = True
|
||||||
|
self.cache = TVChaosUKCache(self)
|
||||||
|
|
||||||
|
def _authorised(self, **kwargs):
|
||||||
|
|
||||||
|
return super(TVChaosUKProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='c_secure_')))
|
||||||
|
|
||||||
|
def _search_provider(self, search_params, **kwargs):
|
||||||
|
|
||||||
|
results = []
|
||||||
|
if not self._authorised():
|
||||||
|
return results
|
||||||
|
|
||||||
|
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
|
||||||
|
|
||||||
|
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download', 'fl': 'free'}.items())
|
||||||
|
for mode in search_params.keys():
|
||||||
|
for search_string in search_params[mode]:
|
||||||
|
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
|
||||||
|
|
||||||
|
if 'Cache' != mode:
|
||||||
|
kwargs = dict(post_data={'keywords': search_string, 'do': 'quick_sort', 'page': '0',
|
||||||
|
'category': '0', 'search_type': 't_name', 'sort': 'added',
|
||||||
|
'order': 'desc', 'daysprune': '-1'})
|
||||||
|
|
||||||
|
html = self.get_url(self.urls['search'], **kwargs)
|
||||||
|
|
||||||
|
cnt = len(items[mode])
|
||||||
|
try:
|
||||||
|
if not html or self._has_no_results(html):
|
||||||
|
raise generic.HaltParseException
|
||||||
|
|
||||||
|
with BS4Parser(html, 'html.parser') as soup:
|
||||||
|
torrent_table = soup.find('table', id='sortabletable')
|
||||||
|
torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
|
||||||
|
get_detail = True
|
||||||
|
|
||||||
|
if 2 > len(torrent_rows):
|
||||||
|
raise generic.HaltParseException
|
||||||
|
|
||||||
|
for tr in torrent_rows[1:]:
|
||||||
|
try:
|
||||||
|
seeders, leechers, size = [tryInt(n, n) for n in [
|
||||||
|
tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]]
|
||||||
|
if self._peers_fail(mode, seeders, leechers) \
|
||||||
|
or self.freeleech and None is tr.find_all('td')[1].find('img', title=rc['fl']):
|
||||||
|
continue
|
||||||
|
|
||||||
|
info = tr.find('a', href=rc['info'])
|
||||||
|
title = (tr.find('div', attrs={'class': 'tooltip-content'}).get_text() or info.get_text()).strip()
|
||||||
|
title = re.findall('(?m)(^[^\r\n]+)', title)[0]
|
||||||
|
download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip(
|
||||||
|
'/').replace(self.urls['config_provider_home_uri'], '')
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if get_detail and title.endswith('...'):
|
||||||
|
try:
|
||||||
|
with BS4Parser(self.get_url('%s%s' % (self.urls['config_provider_home_uri'], info['href'].lstrip(
|
||||||
|
'/').replace(self.urls['config_provider_home_uri'], ''))), 'html.parser') as soup_detail:
|
||||||
|
title = soup_detail.find('td', attrs={'colspan': '3', 'class': 'thead'}).get_text().strip()
|
||||||
|
title = re.findall('(?m)(^[^\r\n]+)', title)[0]
|
||||||
|
except IndexError:
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
get_detail = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
has_series = re.findall('(?i)(.*?series[^\d]*?\d+)(.*)', title)
|
||||||
|
if has_series:
|
||||||
|
rc_xtras = re.compile('(?i)([. _-]|^)(special|extra)s?\w*([. _-]|$)')
|
||||||
|
has_special = rc_xtras.findall(has_series[0][1])
|
||||||
|
if has_special:
|
||||||
|
title = has_series[0][0] + rc_xtras.sub(list(
|
||||||
|
set(list(has_special[0][0]) + list(has_special[0][2])))[0], has_series[0][1])
|
||||||
|
title = re.sub('(?i)series', r'Season', title)
|
||||||
|
|
||||||
|
title_parts = re.findall('(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title)
|
||||||
|
if len(title_parts):
|
||||||
|
new_parts = [tryInt(part, part.strip()) for part in title_parts[0]]
|
||||||
|
if not new_parts[1]:
|
||||||
|
new_parts[1] = 1
|
||||||
|
new_parts[2] = ('E%02d', ' Pack %d')[mode in 'Season'] % new_parts[2]
|
||||||
|
title = '%s.S%02d%s.%s' % tuple(new_parts)
|
||||||
|
|
||||||
|
dated = re.findall('(?i)([\(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([\)\s]*)', title)
|
||||||
|
if dated:
|
||||||
|
title = title.replace(''.join(dated[0]), '%s%s%s' % (
|
||||||
|
('', ' ')[1 < len(dated[0][0])], parse(dated[0][1]).strftime('%Y-%m-%d'),
|
||||||
|
('', ' ')[1 < len(dated[0][2])]))
|
||||||
|
add_pad = re.findall('((?:19|20)\d\d\-\d\d\-\d\d)([\w\W])', title)
|
||||||
|
if len(add_pad) and add_pad[0][1] not in [' ', '.']:
|
||||||
|
title = title.replace(''.join(add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1]))
|
||||||
|
|
||||||
|
if title and download_url:
|
||||||
|
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
except generic.HaltParseException:
|
||||||
|
pass
|
||||||
|
except Exception:
|
||||||
|
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
|
||||||
|
|
||||||
|
self._log_search(mode, len(items[mode]) - cnt,
|
||||||
|
('search string: ' + search_string.replace('%', ' '), self.name)['Cache' == mode])
|
||||||
|
|
||||||
|
if mode in 'Season' and len(items[mode]):
|
||||||
|
break
|
||||||
|
|
||||||
|
self._sort_seeders(mode, items)
|
||||||
|
|
||||||
|
results = list(set(results + items[mode]))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def _season_strings(self, ep_obj, **kwargs):
|
||||||
|
|
||||||
|
return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, prefix='%', sp_detail=(
|
||||||
|
lambda e: [(('', 'Series %(seasonnumber)d%%')[1 < tryInt(e.get('seasonnumber'))] + '%(episodenumber)dof') % e,
|
||||||
|
'Series %(seasonnumber)d' % e]))
|
||||||
|
|
||||||
|
def _episode_strings(self, ep_obj, **kwargs):
|
||||||
|
|
||||||
|
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, prefix='%', date_detail=(
|
||||||
|
lambda d: [d.strftime('%d %b %Y')] + ([d.strftime('%d %B %Y')], [])[d.strftime('%b') == d.strftime('%B')]),
|
||||||
|
ep_detail=(lambda e: [naming_ep_type[2] % e] + (
|
||||||
|
[], ['%(episodenumber)dof' % e])[1 == tryInt(e.get('seasonnumber'))]), **kwargs)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def ui_string(key):
|
||||||
|
|
||||||
|
return 'tvchaosuk_tip' == key and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or ''
|
||||||
|
|
||||||
|
|
||||||
|
class TVChaosUKCache(tvcache.TVCache):
|
||||||
|
|
||||||
|
def __init__(self, this_provider):
|
||||||
|
tvcache.TVCache.__init__(self, this_provider)
|
||||||
|
|
||||||
|
def _cache_data(self):
|
||||||
|
|
||||||
|
return self.provider.cache_data()
|
||||||
|
|
||||||
|
|
||||||
|
provider = TVChaosUKProvider()
|
|
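Note: much of the new TVChaosUK parser is title clean-up: 'Series' becomes 'Season' and 'NofM' numbering is rewritten to SxxEyy. A trimmed, stand-alone sketch of that step; `try_int` is a stand-in for `sickbeard.helpers.tryInt`.

import re

def try_int(value, default=0):
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

def normalise(title, season_pack=False):
    title = re.sub('(?i)series', 'Season', title)
    parts = re.findall(r'(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?(?:(?:pack|part|pt)\W*?)?'
                       r'(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title)
    if parts:
        new_parts = [try_int(p, p.strip()) for p in parts[0]]
        new_parts[1] = new_parts[1] or 1  # default to season 1 when none is named
        new_parts[2] = (' Pack %d' if season_pack else 'E%02d') % new_parts[2]
        return '%s.S%02d%s.%s' % tuple(new_parts)
    return title

print(normalise('Some Show Series 2 3of6 720p HDTV'))  # -> Some Show.S02E03.720p HDTV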
@ -16,8 +16,7 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import generic
|
from . import generic
|
||||||
|
|
||||||
from sickbeard import logger, tvcache
|
from sickbeard import logger, tvcache
|
||||||
|
|
||||||
|
|
||||||
|
@ -35,7 +34,7 @@ class WombleCache(tvcache.TVCache):
|
||||||
def __init__(self, this_provider):
|
def __init__(self, this_provider):
|
||||||
tvcache.TVCache.__init__(self, this_provider)
|
tvcache.TVCache.__init__(self, this_provider)
|
||||||
|
|
||||||
self.minTime = 15 # cache update frequency
|
self.update_freq = 15 # cache update frequency
|
||||||
|
|
||||||
def updateCache(self):
|
def updateCache(self):
|
||||||
|
|
||||||
|
@ -60,7 +59,7 @@ class WombleCache(tvcache.TVCache):
|
||||||
|
|
||||||
# By now we know we've got data and no auth errors, all we need to do is put it in the database
|
# By now we know we've got data and no auth errors, all we need to do is put it in the database
|
||||||
for item in data.entries:
|
for item in data.entries:
|
||||||
title, url = self._get_title_and_url(item)
|
title, url = self._title_and_url(item)
|
||||||
ci = self._parseItem(title, url)
|
ci = self._parseItem(title, url)
|
||||||
if None is not ci:
|
if None is not ci:
|
||||||
cl.append(ci)
|
cl.append(ci)
|
||||||
|
|
|
@ -40,11 +40,10 @@ from sickbeard import encodingKludge as ek
|
||||||
from sickbeard import failed_history
|
from sickbeard import failed_history
|
||||||
from sickbeard.exceptions import ex
|
from sickbeard.exceptions import ex
|
||||||
from sickbeard.providers.generic import GenericProvider
|
from sickbeard.providers.generic import GenericProvider
|
||||||
from sickbeard.blackandwhitelist import BlackAndWhiteList
|
|
||||||
from sickbeard import common
|
from sickbeard import common
|
||||||
|
|
||||||
|
|
||||||
def _downloadResult(result):
|
def _download_result(result):
|
||||||
"""
|
"""
|
||||||
Downloads a result to the appropriate black hole folder.
|
Downloads a result to the appropriate black hole folder.
|
||||||
|
|
||||||
|
@ -53,44 +52,44 @@ def _downloadResult(result):
|
||||||
result: SearchResult instance to download.
|
result: SearchResult instance to download.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
resProvider = result.provider
|
res_provider = result.provider
|
||||||
if resProvider == None:
|
if None is res_provider:
|
||||||
logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR)
|
logger.log(u'Invalid provider name - this is a coding error, report it please', logger.ERROR)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# nzbs with a URL can just be downloaded from the provider
|
# nzbs with a URL can just be downloaded from the provider
|
||||||
if result.resultType == "nzb":
|
if 'nzb' == result.resultType:
|
||||||
newResult = resProvider.download_result(result)
|
new_result = res_provider.download_result(result)
|
||||||
# if it's an nzb data result
|
# if it's an nzb data result
|
||||||
elif result.resultType == "nzbdata":
|
elif 'nzbdata' == result.resultType:
|
||||||
|
|
||||||
# get the final file path to the nzb
|
# get the final file path to the nzb
|
||||||
fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
|
file_name = ek.ek(os.path.join, sickbeard.NZB_DIR, u'%s.nzb' % result.name)
|
||||||
|
|
||||||
logger.log(u"Saving NZB to " + fileName)
|
logger.log(u'Saving NZB to %s' % file_name)
|
||||||
|
|
||||||
newResult = True
|
new_result = True
|
||||||
|
|
||||||
# save the data to disk
|
# save the data to disk
|
||||||
try:
|
try:
|
||||||
with ek.ek(open, fileName, 'w') as fileOut:
|
with ek.ek(open, file_name, 'w') as file_out:
|
||||||
fileOut.write(result.extraInfo[0])
|
file_out.write(result.extraInfo[0])
|
||||||
|
|
||||||
helpers.chmodAsParent(fileName)
|
helpers.chmodAsParent(file_name)
|
||||||
|
|
||||||
except EnvironmentError as e:
|
except EnvironmentError as e:
|
||||||
logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
|
logger.log(u'Error trying to save NZB to black hole: %s' % ex(e), logger.ERROR)
|
||||||
newResult = False
|
new_result = False
|
||||||
elif resProvider.providerType == "torrent":
|
elif 'torrent' == res_provider.providerType:
|
||||||
newResult = resProvider.download_result(result)
|
new_result = res_provider.download_result(result)
|
||||||
else:
|
else:
|
||||||
logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
|
logger.log(u'Invalid provider type - this is a coding error, report it please', logger.ERROR)
|
||||||
newResult = False
|
new_result = False
|
||||||
|
|
||||||
return newResult
|
return new_result
|
||||||
|
|
||||||
|
|
||||||
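Note: for the 'nzbdata' branch above, the payload is written straight into the NZB black hole folder. A bare-bones sketch without the `ek`/`chmodAsParent` wrappers; the path and data handling here are illustrative only.

import os

def save_nzb_to_blackhole(nzb_dir, result_name, nzb_data):
    file_name = os.path.join(nzb_dir, u'%s.nzb' % result_name)
    try:
        with open(file_name, 'w') as file_out:
            file_out.write(nzb_data)
    except EnvironmentError as e:
        print(u'Error trying to save NZB to black hole: %s' % e)
        return False
    return True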
def snatchEpisode(result, endStatus=SNATCHED):
|
def snatch_episode(result, end_status=SNATCHED):
|
||||||
"""
|
"""
|
||||||
Contains the internal logic necessary to actually "snatch" a result that
|
Contains the internal logic necessary to actually "snatch" a result that
|
||||||
has been found.
|
has been found.
|
||||||
|
@ -101,158 +100,140 @@ def snatchEpisode(result, endStatus=SNATCHED):
|
||||||
endStatus: the episode status that should be used for the episode object once it's snatched.
|
endStatus: the episode status that should be used for the episode object once it's snatched.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if result is None:
|
if None is result:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
result.priority = 0 # -1 = low, 0 = normal, 1 = high
|
result.priority = 0 # -1 = low, 0 = normal, 1 = high
|
||||||
if sickbeard.ALLOW_HIGH_PRIORITY:
|
if sickbeard.ALLOW_HIGH_PRIORITY:
|
||||||
# if it aired recently make it high priority
|
# if it aired recently make it high priority
|
||||||
for curEp in result.episodes:
|
for cur_ep in result.episodes:
|
||||||
if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
|
if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
|
||||||
result.priority = 1
|
result.priority = 1
|
||||||
if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
|
if None is not re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I):
|
||||||
endStatus = SNATCHED_PROPER
|
end_status = SNATCHED_PROPER
|
||||||
|
|
||||||
# NZBs can be sent straight to SAB or saved to disk
|
# NZBs can be sent straight to SAB or saved to disk
|
||||||
if result.resultType in ("nzb", "nzbdata"):
|
if result.resultType in ('nzb', 'nzbdata'):
|
||||||
if sickbeard.NZB_METHOD == "blackhole":
|
if 'blackhole' == sickbeard.NZB_METHOD:
|
||||||
dlResult = _downloadResult(result)
|
dl_result = _download_result(result)
|
||||||
elif sickbeard.NZB_METHOD == "sabnzbd":
|
elif 'sabnzbd' == sickbeard.NZB_METHOD:
|
||||||
dlResult = sab.sendNZB(result)
|
dl_result = sab.sendNZB(result)
|
||||||
elif sickbeard.NZB_METHOD == "nzbget":
|
elif 'nzbget' == sickbeard.NZB_METHOD:
|
||||||
is_proper = True if endStatus == SNATCHED_PROPER else False
|
is_proper = True if SNATCHED_PROPER == end_status else False
|
||||||
dlResult = nzbget.sendNZB(result, is_proper)
|
dl_result = nzbget.sendNZB(result, is_proper)
|
||||||
else:
|
else:
|
||||||
logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
|
logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
|
||||||
dlResult = False
|
dl_result = False
|
||||||
|
|
||||||
# TORRENTs can be sent to clients or saved to disk
|
# TORRENTs can be sent to clients or saved to disk
|
||||||
elif result.resultType == "torrent":
|
elif 'torrent' == result.resultType:
|
||||||
# torrents are saved to disk when blackhole mode
|
# torrents are saved to disk when blackhole mode
|
||||||
if sickbeard.TORRENT_METHOD == "blackhole":
|
if 'blackhole' == sickbeard.TORRENT_METHOD:
|
||||||
dlResult = _downloadResult(result)
|
dl_result = _download_result(result)
|
||||||
else:
|
else:
|
||||||
# make sure we have the torrent file content
|
# make sure we have the torrent file content
|
||||||
if not result.content and not result.url.startswith('magnet'):
|
if not result.content and not result.url.startswith('magnet'):
|
||||||
result.content = result.provider.get_url(result.url)
|
result.content = result.provider.get_url(result.url)
|
||||||
if not result.content:
|
if not result.content:
|
||||||
logger.log(u'Torrent content failed to download from ' + result.url, logger.ERROR)
|
logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
|
||||||
return False
|
return False
|
||||||
# Snatches torrent with client
|
# Snatches torrent with client
|
||||||
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
|
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
|
||||||
dlResult = client.sendTORRENT(result)
|
dl_result = client.sendTORRENT(result)
|
||||||
else:
|
else:
|
||||||
logger.log(u"Unknown result type, unable to download it", logger.ERROR)
|
logger.log(u'Unknown result type, unable to download it', logger.ERROR)
|
||||||
dlResult = False
|
dl_result = False
|
||||||
|
|
||||||
if not dlResult:
|
if not dl_result:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if sickbeard.USE_FAILED_DOWNLOADS:
|
if sickbeard.USE_FAILED_DOWNLOADS:
|
||||||
failed_history.logSnatch(result)
|
failed_history.logSnatch(result)
|
||||||
|
|
||||||
ui.notifications.message('Episode snatched', result.name)
|
ui.notifications.message(u'Episode snatched', result.name)
|
||||||
|
|
||||||
history.logSnatch(result)
|
history.logSnatch(result)
|
||||||
|
|
||||||
# don't notify when we re-download an episode
|
# don't notify when we re-download an episode
|
||||||
sql_l = []
|
sql_l = []
|
||||||
for curEpObj in result.episodes:
|
update_imdb_data = True
|
||||||
with curEpObj.lock:
|
for cur_ep_obj in result.episodes:
|
||||||
if isFirstBestMatch(result):
|
with cur_ep_obj.lock:
|
||||||
curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
|
if is_first_best_match(result):
|
||||||
|
cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
|
||||||
else:
|
else:
|
||||||
curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
|
cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
|
||||||
|
|
||||||
item = curEpObj.get_sql()
|
item = cur_ep_obj.get_sql()
|
||||||
if None is not item:
|
if None is not item:
|
||||||
sql_l.append(item)
|
sql_l.append(item)
|
||||||
|
|
||||||
if curEpObj.status not in Quality.DOWNLOADED:
|
if cur_ep_obj.status not in Quality.DOWNLOADED:
|
||||||
notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
|
notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
|
||||||
|
|
||||||
curEpObj.show.load_imdb_info()
|
update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()
|
||||||
|
|
||||||
if 0 < len(sql_l):
|
if 0 < len(sql_l):
|
||||||
myDB = db.DBConnection()
|
my_db = db.DBConnection()
|
||||||
myDB.mass_action(sql_l)
|
my_db.mass_action(sql_l)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def pick_best_result(results, show, quality_list=None):
|
||||||
def filter_release_name(name, filter_words):
|
logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)
|
||||||
"""
|
|
||||||
Filters out results based on filter_words
|
|
||||||
|
|
||||||
name: name to check
|
|
||||||
filter_words : Words to filter on, separated by comma
|
|
||||||
|
|
||||||
Returns: False if the release name is OK, True if it contains one of the filter_words
|
|
||||||
"""
|
|
||||||
if filter_words:
|
|
||||||
filters = [re.compile('.*%s.*' % filter.strip(), re.I) for filter in filter_words.split(',')]
|
|
||||||
for regfilter in filters:
|
|
||||||
if regfilter.search(name):
|
|
||||||
logger.log(u"" + name + " contains pattern: " + regfilter.pattern, logger.DEBUG)
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def pickBestResult(results, show, quality_list=None):
|
|
||||||
logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)
|
|
||||||
|
|
||||||
# find the best result for the current episode
|
# find the best result for the current episode
|
||||||
bestResult = None
|
best_result = None
|
||||||
for cur_result in results:
|
for cur_result in results:
|
||||||
|
|
||||||
logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])
|
|
||||||
|
|
||||||
if show.is_anime:
|
logger.log(u'Quality of %s is %s' % (cur_result.name, Quality.qualityStrings[cur_result.quality]))
|
||||||
if not show.release_groups.is_valid(cur_result):
|
|
||||||
continue
|
if show.is_anime and not show.release_groups.is_valid(cur_result):
|
||||||
|
continue
|
||||||
|
|
||||||
if quality_list and cur_result.quality not in quality_list:
|
if quality_list and cur_result.quality not in quality_list:
|
||||||
logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
|
logger.log(u'%s is an unwanted quality, rejecting it' % cur_result.name, logger.DEBUG)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if show.rls_ignore_words and filter_release_name(cur_result.name, show.rls_ignore_words):
|
re_extras = dict(re_prefix='.*', re_suffix='.*')
|
||||||
logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
|
result = show_name_helpers.contains_any(cur_result.name, show.rls_ignore_words, **re_extras)
|
||||||
logger.MESSAGE)
|
if None is not result and result:
|
||||||
|
logger.log(u'Ignored: %s for containing ignore word' % cur_result.name)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if show.rls_require_words and not filter_release_name(cur_result.name, show.rls_require_words):
|
result = show_name_helpers.contains_any(cur_result.name, show.rls_require_words, **re_extras)
|
||||||
logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
|
if None is not result and not result:
|
||||||
logger.MESSAGE)
|
logger.log(u'Ignored: %s for not containing any required word match' % cur_result.name)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
cur_size = getattr(cur_result, 'size', None)
|
cur_size = getattr(cur_result, 'size', None)
|
||||||
if sickbeard.USE_FAILED_DOWNLOADS and None is not cur_size and failed_history.hasFailed(
|
if sickbeard.USE_FAILED_DOWNLOADS and None is not cur_size and failed_history.hasFailed(
|
||||||
cur_result.name, cur_size, cur_result.provider.name):
|
cur_result.name, cur_size, cur_result.provider.name):
|
||||||
logger.log(cur_result.name + u" has previously failed, rejecting it")
|
logger.log(u'%s has previously failed, rejecting it' % cur_result.name)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not bestResult or bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN:
|
if not best_result or best_result.quality < cur_result.quality != Quality.UNKNOWN:
|
||||||
bestResult = cur_result
|
best_result = cur_result
|
||||||
|
|
||||||
elif bestResult.quality == cur_result.quality:
|
elif best_result.quality == cur_result.quality:
|
||||||
if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
|
if re.search('(?i)(proper|repack)', cur_result.name) or \
|
||||||
bestResult = cur_result
|
show.is_anime and re.search('(?i)(v1|v2|v3|v4|v5)', cur_result.name):
|
||||||
elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
|
best_result = cur_result
|
||||||
bestResult = cur_result
|
elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower():
|
||||||
elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
|
best_result = cur_result
|
||||||
logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
|
elif 'xvid' in best_result.name.lower() and 'x264' in cur_result.name.lower():
|
||||||
bestResult = cur_result
|
logger.log(u'Preferring %s (x264 over xvid)' % cur_result.name)
|
||||||
|
best_result = cur_result
|
||||||
|
|
||||||
if bestResult:
|
if best_result:
|
||||||
logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
|
logger.log(u'Picked %s as the best' % best_result.name, logger.DEBUG)
|
||||||
else:
|
else:
|
||||||
logger.log(u"No result picked.", logger.DEBUG)
|
logger.log(u'No result picked.', logger.DEBUG)
|
||||||
|
|
||||||
return bestResult
|
return best_result
|
||||||
|
|
||||||
|
|
||||||
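Note: the ignore/require word checks now go through `show_name_helpers.contains_any`. The stand-in below uses assumed semantics: None when no filter words are configured, otherwise True/False for a regex hit on any comma separated word, wrapped by the `.*` prefix/suffix passed in `re_extras`.

import re

def contains_any(name, words, re_prefix='', re_suffix=''):
    if not words:
        return None
    patterns = [re.compile('(?i)%s%s%s' % (re_prefix, w.strip(), re_suffix))
                for w in words.split(',') if w.strip()]
    return any(p.search(name) for p in patterns)

re_extras = dict(re_prefix='.*', re_suffix='.*')
print(contains_any('Show.S01E01.720p.HDTV.x264-GRP', 'nuked, subpack', **re_extras))  # -> False
print(contains_any('Show.S01E01.720p.HDTV.x264-GRP', '', **re_extras))                # -> None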
def isFinalResult(result):
|
def is_final_result(result):
|
||||||
"""
|
"""
|
||||||
Checks if the given result is good enough quality that we can stop searching for other ones.
|
Checks if the given result is good enough quality that we can stop searching for other ones.
|
||||||
|
|
||||||
|
@ -261,16 +242,14 @@ def isFinalResult(result):
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)
|
logger.log(u'Checking if searching should continue after finding %s' % result.name, logger.DEBUG)
|
||||||
|
|
||||||
show_obj = result.episodes[0].show
|
show_obj = result.episodes[0].show
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
|
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
|
||||||
|
|
||||||
# if there is a redownload that's higher than this then we definitely need to keep looking
|
# if there is a redownload that's higher than this then we definitely need to keep looking
|
||||||
if best_qualities and result.quality < max(best_qualities):
|
if best_qualities and max(best_qualities) > result.quality:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# if it does not match the show's black and white list it's no good
|
# if it does not match the show's black and white list it's no good
|
||||||
|
@ -281,10 +260,10 @@ def isFinalResult(result):
|
||||||
elif any_qualities and result.quality in any_qualities:
|
elif any_qualities and result.quality in any_qualities:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
elif best_qualities and result.quality == max(best_qualities):
|
elif best_qualities and max(best_qualities) == result.quality:
|
||||||
|
|
||||||
# if this is the best redownload but we have a higher initial download then keep looking
|
# if this is the best redownload but we have a higher initial download then keep looking
|
||||||
if any_qualities and result.quality < max(any_qualities):
|
if any_qualities and max(any_qualities) > result.quality:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# if this is the best redownload and we don't have a higher initial download then we're done
|
# if this is the best redownload and we don't have a higher initial download then we're done
|
||||||
|
@ -296,13 +275,12 @@ def isFinalResult(result):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def isFirstBestMatch(result):
|
def is_first_best_match(result):
|
||||||
"""
|
"""
|
||||||
Checks if the given result is a best quality match and if we want to archive the episode on first match.
|
Checks if the given result is a best quality match and if we want to archive the episode on first match.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
logger.log(u"Checking if we should archive our first best quality match for episode " + result.name,
|
logger.log(u'Checking if the first best quality match should be archived for episode %s' % result.name, logger.DEBUG)
|
||||||
logger.DEBUG)
|
|
||||||
|
|
||||||
show_obj = result.episodes[0].show
|
show_obj = result.episodes[0].show
|
||||||
|
|
||||||
|
@ -315,25 +293,25 @@ def isFirstBestMatch(result):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def wantedEpisodes(show, fromDate, make_dict=False):
|
def wanted_episodes(show, from_date, make_dict=False):
|
||||||
initialQualities, archiveQualities = common.Quality.splitQuality(show.quality)
|
initial_qualities, archive_qualities = common.Quality.splitQuality(show.quality)
|
||||||
allQualities = list(set(initialQualities + archiveQualities))
|
all_qualities = list(set(initial_qualities + archive_qualities))
|
||||||
|
|
||||||
myDB = db.DBConnection()
|
my_db = db.DBConnection()
|
||||||
|
|
||||||
if show.air_by_date:
|
if show.air_by_date:
|
||||||
sqlString = 'SELECT ep.status, ep.season, ep.episode, ep.airdate FROM [tv_episodes] AS ep, [tv_shows] AS show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.showid = ? AND show.air_by_date = 1'
|
sql_string = 'SELECT ep.status, ep.season, ep.episode, ep.airdate FROM [tv_episodes] AS ep, [tv_shows] AS show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.showid = ? AND show.air_by_date = 1'
|
||||||
else:
|
else:
|
||||||
sqlString = 'SELECT status, season, episode, airdate FROM [tv_episodes] WHERE showid = ? AND season > 0'
|
sql_string = 'SELECT status, season, episode, airdate FROM [tv_episodes] WHERE showid = ? AND season > 0'
|
||||||
|
|
||||||
if sickbeard.SEARCH_UNAIRED:
|
if sickbeard.SEARCH_UNAIRED:
|
||||||
statusList = [common.WANTED, common.FAILED, common.UNAIRED]
|
status_list = [common.WANTED, common.FAILED, common.UNAIRED]
|
||||||
sqlString += ' AND ( airdate > ? OR airdate = 1 )'
|
sql_string += ' AND ( airdate > ? OR airdate = 1 )'
|
||||||
else:
|
else:
|
||||||
statusList = [common.WANTED, common.FAILED]
|
status_list = [common.WANTED, common.FAILED]
|
||||||
sqlString += ' AND airdate > ?'
|
sql_string += ' AND airdate > ?'
|
||||||
|
|
||||||
sqlResults = myDB.select(sqlString, [show.indexerid, fromDate.toordinal()])
|
sql_results = my_db.select(sql_string, [show.indexerid, from_date.toordinal()])
|
||||||
|
|
||||||
# check through the list of statuses to see if we want any
|
# check through the list of statuses to see if we want any
|
||||||
if make_dict:
|
if make_dict:
|
||||||
|
@ -341,52 +319,59 @@ def wantedEpisodes(show, fromDate, make_dict=False):
|
||||||
else:
|
else:
|
||||||
wanted = []
|
wanted = []
|
||||||
total_wanted = total_replacing = total_unaired = 0
|
total_wanted = total_replacing = total_unaired = 0
|
||||||
downloadedStatusList = (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST)
|
downloaded_status_list = (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST)
|
||||||
for result in sqlResults:
|
for result in sql_results:
|
||||||
not_downloaded = True
|
not_downloaded = True
|
||||||
curCompositeStatus = int(result["status"])
|
cur_composite_status = int(result['status'])
|
||||||
curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
|
cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_composite_status)
|
||||||
|
|
||||||
if show.archive_firstmatch and curStatus in downloadedStatusList and curQuality in archiveQualities:
|
if show.archive_firstmatch and cur_status in downloaded_status_list and cur_quality in archive_qualities:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# special case: already downloaded quality is not in any of the wanted Qualities
|
# special case: already downloaded quality is not in any of the wanted Qualities
|
||||||
other_quality_downloaded = False
|
other_quality_downloaded = False
|
||||||
if curStatus in downloadedStatusList and curQuality not in allQualities:
|
if cur_status in downloaded_status_list and cur_quality not in all_qualities:
|
||||||
other_quality_downloaded = True
|
other_quality_downloaded = True
|
||||||
wantedQualities = allQualities
|
wanted_qualities = all_qualities
|
||||||
else:
|
else:
|
||||||
wantedQualities = archiveQualities
|
wanted_qualities = archive_qualities
|
||||||
|
|
||||||
if archiveQualities:
|
if archive_qualities:
|
||||||
highestWantedQuality = max(wantedQualities)
|
highest_wanted_quality = max(wanted_qualities)
|
||||||
else:
|
else:
|
||||||
if other_quality_downloaded:
|
if other_quality_downloaded:
|
||||||
highestWantedQuality = max(initialQualities)
|
highest_wanted_quality = max(initial_qualities)
|
||||||
else:
|
else:
|
||||||
highestWantedQuality = 0
|
highest_wanted_quality = 0
|
||||||
|
|
||||||
# if we need a better one then say yes
|
# if we need a better one then say yes
|
||||||
if (curStatus in downloadedStatusList and curQuality < highestWantedQuality) or curStatus in statusList or (sickbeard.SEARCH_UNAIRED and result['airdate'] == 1 and curStatus in (common.SKIPPED, common.IGNORED, common.UNAIRED, common.UNKNOWN, common.FAILED)):
|
if (cur_status in downloaded_status_list and cur_quality < highest_wanted_quality) or \
|
||||||
|
cur_status in status_list or \
|
||||||
|
(sickbeard.SEARCH_UNAIRED and 1 == result['airdate'] and cur_status in (common.SKIPPED, common.IGNORED,
|
||||||
|
common.UNAIRED, common.UNKNOWN,
|
||||||
|
common.FAILED)):
|
||||||
|
|
||||||
if curStatus in (common.WANTED, common.FAILED):
|
if cur_status in (common.WANTED, common.FAILED):
|
||||||
total_wanted += 1
|
total_wanted += 1
|
||||||
elif curStatus in (common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN):
|
elif cur_status in (common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN):
|
||||||
total_unaired += 1
|
total_unaired += 1
|
||||||
else:
|
else:
|
||||||
total_replacing += 1
|
total_replacing += 1
|
||||||
not_downloaded = False
|
not_downloaded = False
|
||||||
|
|
||||||
epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
|
ep_obj = show.getEpisode(int(result['season']), int(result['episode']))
|
||||||
if make_dict:
|
if make_dict:
|
||||||
wanted.setdefault(epObj.season, []).append(epObj)
|
wanted.setdefault(ep_obj.season, []).append(ep_obj)
|
||||||
else:
|
else:
|
||||||
epObj.wantedQuality = [i for i in (initialQualities if not_downloaded else wantedQualities) if (i > curQuality and i != common.Quality.UNKNOWN)]
|
ep_obj.wantedQuality = [i for i in (initial_qualities if not_downloaded else
|
||||||
wanted.append(epObj)
|
wanted_qualities) if (i > cur_quality and i != common.Quality.UNKNOWN)]
|
||||||
|
wanted.append(ep_obj)
|
||||||
|
|
||||||
if 0 < total_wanted + total_replacing + total_unaired:
|
if 0 < total_wanted + total_replacing + total_unaired:
|
||||||
actions = []
|
actions = []
|
||||||
for msg, total in ['%d episode%s', total_wanted], ['to upgrade %d episode%s', total_replacing], ['%d unaired episode%s', total_unaired]:
|
for msg, total in ['%d episode%s', total_wanted], \
|
||||||
|
['to upgrade %d episode%s', total_replacing], \
|
||||||
|
['%d unaired episode%s', total_unaired]:
|
||||||
if 0 < total:
|
if 0 < total:
|
||||||
actions.append(msg % (total, helpers.maybe_plural(total)))
|
actions.append(msg % (total, helpers.maybe_plural(total)))
|
||||||
logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))
|
logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))
|
||||||
|
@ -394,344 +379,331 @@ def wantedEpisodes(show, fromDate, make_dict=False):
|
||||||
return wanted
|
return wanted
|
||||||
|
|
||||||
|
|
||||||
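Note: the summary log line is assembled from the three counters with `helpers.maybe_plural`, assumed here to return 's' for any count other than 1.

def maybe_plural(number=1):
    return ('s', '')[1 == number]

total_wanted, total_replacing, total_unaired = 3, 1, 0
actions = []
for msg, total in (['%d episode%s', total_wanted],
                   ['to upgrade %d episode%s', total_replacing],
                   ['%d unaired episode%s', total_unaired]):
    if 0 < total:
        actions.append(msg % (total, maybe_plural(total)))
print(u'We want %s for %s' % (' and '.join(actions), 'Example Show'))
# -> We want 3 episodes and to upgrade 1 episode for Example Show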
def searchForNeededEpisodes(episodes):
|
def search_for_needed_episodes(episodes):
|
||||||
foundResults = {}
|
found_results = {}
|
||||||
|
|
||||||
didSearch = False
|
search_done = False
|
||||||
|
|
||||||
origThreadName = threading.currentThread().name
|
orig_thread_name = threading.currentThread().name
|
||||||
|
|
||||||
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
|
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
|
||||||
|
|
||||||
for curProvider in providers:
|
for cur_provider in providers:
|
||||||
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
|
threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
|
||||||
|
|
||||||
curFoundResults = curProvider.search_rss(episodes)
|
cur_found_results = cur_provider.search_rss(episodes)
|
||||||
|
|
||||||
didSearch = True
|
search_done = True
|
||||||
|
|
||||||
# pick a single result for each episode, respecting existing results
|
# pick a single result for each episode, respecting existing results
|
||||||
for curEp in curFoundResults:
|
for cur_ep in cur_found_results:
|
||||||
|
|
||||||
if curEp.show.paused:
|
if cur_ep.show.paused:
|
||||||
logger.log(
|
logger.log(u'Show %s is paused, ignoring all RSS items for %s' % (cur_ep.show.name, cur_ep.prettyName()),
|
||||||
u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(),
|
logger.DEBUG)
|
||||||
logger.DEBUG)
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# find the best result for the current episode
|
# find the best result for the current episode
|
||||||
bestResult = pickBestResult(curFoundResults[curEp], curEp.show)
|
best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show)
|
||||||
|
|
||||||
# if all results were rejected move on to the next episode
|
# if all results were rejected move on to the next episode
|
||||||
if not bestResult:
|
if not best_result:
|
||||||
logger.log(u"All found results for " + curEp.prettyName() + " were rejected.", logger.DEBUG)
|
logger.log(u'All found results for %s were rejected.' % cur_ep.prettyName(), logger.DEBUG)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# if it's already in the list (from another provider) and the newly found quality is no better then skip it
|
# if it's already in the list (from another provider) and the newly found quality is no better then skip it
|
||||||
if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
|
if cur_ep in found_results and best_result.quality <= found_results[cur_ep].quality:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# filter out possible bad torrents from providers
|
# filter out possible bad torrents from providers
|
||||||
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
|
if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
|
||||||
bestResult.content = None
|
best_result.content = None
|
||||||
if not bestResult.url.startswith('magnet'):
|
if not best_result.url.startswith('magnet'):
|
||||||
bestResult.content = bestResult.provider.get_url(bestResult.url)
|
best_result.content = best_result.provider.get_url(best_result.url)
|
||||||
if not bestResult.content:
|
if not best_result.content:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
-            foundResults[curEp] = bestResult
-
-    threading.currentThread().name = origThreadName
-
-    if not didSearch:
-        logger.log(
-            u"No NZB/Torrent providers found or enabled in the SickGear config for recent searches. Please check your settings.",
-            logger.ERROR)
-
-    return foundResults.values()
+            found_results[cur_ep] = best_result
+
+    threading.currentThread().name = orig_thread_name
+
+    if not search_done:
+        logger.log(u'No NZB/Torrent provider enabled to do recent searches. Please check provider options.', logger.ERROR)
+
+    return found_results.values()

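The recent search keeps at most one result per episode and only swaps it out when a later provider offers strictly better quality; a small self-contained sketch of that bookkeeping (the `Result` class is a stand-in for SickGear's result objects):

```python
class Result(object):
    def __init__(self, provider, quality):
        self.provider, self.quality = provider, quality


def keep_best_per_episode(found_results, episode, candidate):
    """Store candidate unless an equal or better result is already held for the episode."""
    current = found_results.get(episode)
    if current is not None and candidate.quality <= current.quality:
        return  # the existing result is at least as good, keep it
    found_results[episode] = candidate


results = {}
keep_best_per_episode(results, 'S01E01', Result('provider-a', 4))
keep_best_per_episode(results, 'S01E01', Result('provider-b', 2))  # ignored, lower quality
print(results['S01E01'].provider)  # provider-a
```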
def searchProviders(show, episodes, manual_search=False):
|
def search_providers(show, episodes, manual_search=False):
|
||||||
foundResults = {}
|
found_results = {}
|
||||||
finalResults = []
|
final_results = []
|
||||||
|
|
||||||
didSearch = False
|
search_done = False
|
||||||
|
|
||||||
origThreadName = threading.currentThread().name
|
orig_thread_name = threading.currentThread().name
|
||||||
|
|
||||||
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog]
|
provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog]
|
||||||
for providerNum, curProvider in enumerate(providers):
|
for cur_provider in provider_list:
|
||||||
if curProvider.anime_only and not show.is_anime:
|
if cur_provider.anime_only and not show.is_anime:
|
||||||
logger.log(u"" + str(show.name) + " is not an anime, skipping", logger.DEBUG)
|
logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
|
threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
|
||||||
|
provider_id = cur_provider.get_id()
|
||||||
|
|
||||||
foundResults[curProvider.name] = {}
|
found_results[provider_id] = {}
|
||||||
|
|
||||||
searchCount = 0
|
search_count = 0
|
||||||
search_mode = curProvider.search_mode
|
search_mode = cur_provider.search_mode
|
||||||
|
|
||||||
while(True):
|
while True:
|
||||||
searchCount += 1
|
search_count += 1
|
||||||
|
|
||||||
if search_mode == 'eponly':
|
if 'eponly' == search_mode:
|
||||||
logger.log(u"Performing episode search for " + show.name)
|
logger.log(u'Performing episode search for %s' % show.name)
|
||||||
else:
|
else:
|
||||||
logger.log(u"Performing season pack search for " + show.name)
|
logger.log(u'Performing season pack search for %s' % show.name)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
curProvider.cache._clearCache()
|
cur_provider.cache._clearCache()
|
||||||
searchResults = curProvider.find_search_results(show, episodes, search_mode, manual_search)
|
search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search)
|
||||||
except exceptions.AuthException as e:
|
except exceptions.AuthException as e:
|
||||||
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
|
logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
|
||||||
break
|
break
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
|
logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
|
||||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||||
break
|
break
|
||||||
finally:
|
finally:
|
||||||
threading.currentThread().name = origThreadName
|
threading.currentThread().name = orig_thread_name
|
||||||
|
|
||||||
didSearch = True
|
search_done = True
|
||||||
|
|
||||||
if len(searchResults):
|
if len(search_results):
|
||||||
# make a list of all the results for this provider
|
# make a list of all the results for this provider
|
||||||
for curEp in searchResults:
|
for cur_ep in search_results:
|
||||||
# skip non-tv crap
|
# skip non-tv crap
|
||||||
-                    searchResults[curEp] = filter(
-                        lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, searchResults[curEp])
+                    search_results[cur_ep] = filter(
+                        lambda item: show_name_helpers.pass_wordlist_checks(item.name, parse=False) and
+                        item.show == show, search_results[cur_ep])
||||||
|
|
||||||
if curEp in foundResults:
|
if cur_ep in found_results:
|
||||||
foundResults[curProvider.name][curEp] += searchResults[curEp]
|
found_results[provider_id][cur_ep] += search_results[cur_ep]
|
||||||
else:
|
else:
|
||||||
foundResults[curProvider.name][curEp] = searchResults[curEp]
|
found_results[provider_id][cur_ep] = search_results[cur_ep]
|
||||||
|
|
||||||
break
|
break
|
||||||
elif not curProvider.search_fallback or searchCount == 2:
|
elif not cur_provider.search_fallback or search_count == 2:
|
||||||
break
|
break
|
||||||
|
|
||||||
-                if search_mode == 'sponly':
-                    logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...")
-                    search_mode = 'eponly'
-                else:
-                    logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...")
-                    search_mode = 'sponly'
+                search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
+                logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])
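The refactor collapses that if/else into a tuple indexed by a boolean, a pattern used throughout this change set; a quick stand-alone illustration of how the toggle behaves:

```python
def flip_search_mode(search_mode):
    # ('ep', 'sp')[condition] picks index 0 when the condition is False and index 1 when True,
    # so 'eponly' flips to 'sponly' and anything else flips back to 'eponly'
    return '%sonly' % ('ep', 'sp')['ep' in search_mode]


assert 'sponly' == flip_search_mode('eponly')
assert 'eponly' == flip_search_mode('sponly')
print(flip_search_mode('eponly'))  # sponly
```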
# skip to next provider if we have no results to process
|
# skip to next provider if we have no results to process
|
||||||
if not len(foundResults[curProvider.name]):
|
if not len(found_results[provider_id]):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
anyQualities, bestQualities = Quality.splitQuality(show.quality)
|
any_qualities, best_qualities = Quality.splitQuality(show.quality)
|
||||||
|
|
||||||
# pick the best season NZB
|
# pick the best season NZB
|
||||||
bestSeasonResult = None
|
best_season_result = None
|
||||||
if SEASON_RESULT in foundResults[curProvider.name]:
|
if SEASON_RESULT in found_results[provider_id]:
|
||||||
bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
|
best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
|
||||||
anyQualities + bestQualities)
|
any_qualities + best_qualities)
|
||||||
|
|
||||||
highest_quality_overall = 0
|
highest_quality_overall = 0
|
||||||
for cur_episode in foundResults[curProvider.name]:
|
for cur_episode in found_results[provider_id]:
|
||||||
for cur_result in foundResults[curProvider.name][cur_episode]:
|
for cur_result in found_results[provider_id][cur_episode]:
|
||||||
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
|
if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
|
||||||
highest_quality_overall = cur_result.quality
|
highest_quality_overall = cur_result.quality
|
||||||
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall],
|
logger.log(u'The highest quality of any match is %s' % Quality.qualityStrings[highest_quality_overall],
|
||||||
logger.DEBUG)
|
logger.DEBUG)
|
||||||
|
|
||||||
         # see if every episode is wanted
-        if bestSeasonResult:
-            searchedSeasons = []
-            searchedSeasons = [str(x.season) for x in episodes]
+        if best_season_result:
             # get the quality of the season nzb
-            seasonQual = bestSeasonResult.quality
-            logger.log(
-                u"The quality of the season " + bestSeasonResult.provider.providerType + " is " + Quality.qualityStrings[
-                    seasonQual], logger.DEBUG)
-
-            myDB = db.DBConnection()
-            allEps = [int(x["episode"])
-                      for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( " + ','.join(searchedSeasons) + " ) )",
-                                           [show.indexerid])]
-
-            logger.log(u"Executed query: [SELECT episode FROM tv_episodes WHERE showid = %s AND season in %s]" % (show.indexerid, ','.join(searchedSeasons)))
-            logger.log(u"Episode list: " + str(allEps), logger.DEBUG)
-
-            allWanted = True
-            anyWanted = False
-            for curEpNum in allEps:
+            season_qual = best_season_result.quality
+            logger.log(u'The quality of the season %s is %s' % (best_season_result.provider.providerType,
+                                                                Quality.qualityStrings[season_qual]), logger.DEBUG)
+
+            my_db = db.DBConnection()
+            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
+                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
+            ep_nums = [int(x['episode']) for x in my_db.select(sql)]
+
+            logger.log(u'Executed query: [%s]' % sql)
+            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)
+
+            all_wanted = True
+            any_wanted = False
+            for ep_num in ep_nums:
                 for season in set([x.season for x in episodes]):
-                    if not show.wantEpisode(season, curEpNum, seasonQual):
-                        allWanted = False
+                    if not show.wantEpisode(season, ep_num, season_qual):
+                        all_wanted = False
                     else:
-                        anyWanted = True
+                        any_wanted = True
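For reference, a self-contained sketch of what that episode lookup does against a bare sqlite3 connection; the table is reduced to the columns the query touches, and it uses sqlite's `?` placeholders rather than the string interpolation in the patch:

```python
import sqlite3


def episode_numbers(conn, show_id, seasons):
    """Return the episode numbers stored for a show across the given seasons."""
    placeholders = ','.join('?' * len(seasons))
    sql = 'SELECT episode FROM tv_episodes WHERE showid = ? AND season IN (%s)' % placeholders
    return [int(row[0]) for row in conn.execute(sql, [show_id] + list(seasons))]


conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes (showid INTEGER, season INTEGER, episode INTEGER)')
conn.executemany('INSERT INTO tv_episodes VALUES (?, ?, ?)',
                 [(1, 1, 1), (1, 1, 2), (1, 2, 1), (2, 1, 1)])
print(episode_numbers(conn, 1, [1, 2]))  # e.g. [1, 2, 1]
```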
             # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
-            if allWanted and bestSeasonResult.quality == highest_quality_overall:
-                logger.log(
-                    u"Every episode in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
-                epObjs = []
-                for curEpNum in allEps:
-                    epObjs.append(show.getEpisode(season, curEpNum))
-                bestSeasonResult.episodes = epObjs
-
-                return [bestSeasonResult]
-
-            elif not anyWanted:
-                logger.log(
-                    u"No episodes from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
-                    logger.DEBUG)
+            if all_wanted and highest_quality_overall == best_season_result.quality:
+                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
+                           (best_season_result.provider.providerType, best_season_result.name))
+                ep_objs = []
+                for ep_num in ep_nums:
+                    for season in set([x.season for x in episodes]):
+                        ep_objs.append(show.getEpisode(season, ep_num))
+                best_season_result.episodes = ep_objs
+
+                return [best_season_result]
+
+            elif not any_wanted:
+                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
+                           best_season_result.name, logger.DEBUG)
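The `all_wanted`/`any_wanted` flags drive a three-way decision; the same outcome can be phrased with `all()`/`any()`, shown here as a rough sketch where `want_episode` stands in for `show.wantEpisode`:

```python
def classify_season_result(want_episode, ep_nums, seasons, quality):
    """Return 'take-whole-season', 'ignore' or 'split' depending on which episodes are wanted."""
    wanted = [want_episode(season, ep_num, quality)
              for ep_num in ep_nums for season in seasons]
    if wanted and all(wanted):
        return 'take-whole-season'  # every episode is needed, grab the season result as one download
    if not any(wanted):
        return 'ignore'             # nothing is needed at this quality
    return 'split'                  # only some episodes are needed, break the result apart


print(classify_season_result(lambda s, e, q: e != 3, [1, 2, 3], [1], quality=4))  # split
```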
             else:
-                if bestSeasonResult.provider.providerType == GenericProvider.NZB:
-                    logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
+                if GenericProvider.NZB == best_season_result.provider.providerType:
+                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)
||||||
# if not, break it apart and add them as the lowest priority results
|
# if not, break it apart and add them as the lowest priority results
|
||||||
individualResults = nzbSplitter.splitResult(bestSeasonResult)
|
individual_results = nzbSplitter.splitResult(best_season_result)
|
||||||
|
|
||||||
individualResults = filter(
|
individual_results = filter(
|
||||||
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, individualResults)
|
lambda r: show_name_helpers.pass_wordlist_checks(r.name, parse=False) and r.show == show, individual_results)
|
||||||
|
|
||||||
for curResult in individualResults:
|
for cur_result in individual_results:
|
||||||
if len(curResult.episodes) == 1:
|
if 1 == len(cur_result.episodes):
|
||||||
epNum = curResult.episodes[0].episode
|
ep_num = cur_result.episodes[0].episode
|
||||||
elif len(curResult.episodes) > 1:
|
elif 1 < len(cur_result.episodes):
|
||||||
epNum = MULTI_EP_RESULT
|
ep_num = MULTI_EP_RESULT
|
||||||
|
|
||||||
if epNum in foundResults[curProvider.name]:
|
if ep_num in found_results[provider_id]:
|
||||||
foundResults[curProvider.name][epNum].append(curResult)
|
found_results[provider_id][ep_num].append(cur_result)
|
||||||
else:
|
else:
|
||||||
foundResults[curProvider.name][epNum] = [curResult]
|
found_results[provider_id][ep_num] = [cur_result]
|
||||||
|
|
||||||
# If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
|
# If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
|
||||||
else:
|
else:
|
||||||
|
|
||||||
                     # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
-                    logger.log(
-                        u"Adding multi episode result for full season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
-                    epObjs = []
-                    for curEpNum in allEps:
-                        epObjs.append(show.getEpisode(season, curEpNum))
-                    bestSeasonResult.episodes = epObjs
+                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
+                               u'the episodes that you do not want to "don\'t download"')
+                    ep_objs = []
+                    for ep_num in ep_nums:
+                        for season in set([x.season for x in episodes]):
+                            ep_objs.append(show.getEpisode(season, ep_num))
+                    best_season_result.episodes = ep_objs
|
|
||||||
epNum = MULTI_EP_RESULT
|
ep_num = MULTI_EP_RESULT
|
||||||
if epNum in foundResults[curProvider.name]:
|
if ep_num in found_results[provider_id]:
|
||||||
foundResults[curProvider.name][epNum].append(bestSeasonResult)
|
found_results[provider_id][ep_num].append(best_season_result)
|
||||||
else:
|
else:
|
||||||
foundResults[curProvider.name][epNum] = [bestSeasonResult]
|
found_results[provider_id][ep_num] = [best_season_result]
|
||||||
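Results are filed into a per-provider dict keyed by episode number, with `MULTI_EP_RESULT` acting as a shared bucket for anything spanning more than one episode; a stripped-down sketch of that bookkeeping (the sentinel value and dict-of-dicts layout are illustrative):

```python
MULTI_EP_RESULT = -1  # illustrative sentinel bucket for results covering several episodes


def file_result(found_results, provider_id, result):
    """Place a result in the provider's bucket for its episode, or in the multi-episode bucket."""
    buckets = found_results.setdefault(provider_id, {})
    key = result['episodes'][0] if 1 == len(result['episodes']) else MULTI_EP_RESULT
    buckets.setdefault(key, []).append(result)


found = {}
file_result(found, 'some_provider', {'name': 'Show.S01E01', 'episodes': [1]})
file_result(found, 'some_provider', {'name': 'Show.Season.1', 'episodes': [1, 2, 3]})
print(sorted(found['some_provider']))  # [-1, 1]
```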
|
|
||||||
# go through multi-ep results and see if we really want them or not, get rid of the rest
|
# go through multi-ep results and see if we really want them or not, get rid of the rest
|
||||||
multiResults = {}
|
multi_results = {}
|
||||||
if MULTI_EP_RESULT in foundResults[curProvider.name]:
|
if MULTI_EP_RESULT in found_results[provider_id]:
|
||||||
for multiResult in foundResults[curProvider.name][MULTI_EP_RESULT]:
|
for multi_result in found_results[provider_id][MULTI_EP_RESULT]:
|
||||||
|
|
||||||
logger.log(u"Seeing if we want to bother with multi episode result " + multiResult.name, logger.DEBUG)
|
logger.log(u'Checking usefulness of multi episode result %s' % multi_result.name, logger.DEBUG)
|
||||||
|
|
||||||
if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
|
if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multi_result.name, multi_result.size,
|
||||||
multiResult.provider.name):
|
multi_result.provider.name):
|
||||||
logger.log(multiResult.name + u" has previously failed, rejecting this multi episode result")
|
logger.log(u'%s has previously failed, rejecting this multi episode result' % multi_result.name)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# see how many of the eps that this result covers aren't covered by single results
|
# see how many of the eps that this result covers aren't covered by single results
|
||||||
neededEps = []
|
needed_eps = []
|
||||||
notNeededEps = []
|
not_needed_eps = []
|
||||||
for epObj in multiResult.episodes:
|
for ep_obj in multi_result.episodes:
|
||||||
epNum = epObj.episode
|
ep_num = ep_obj.episode
|
||||||
# if we have results for the episode
|
# if we have results for the episode
|
||||||
if epNum in foundResults[curProvider.name] and len(foundResults[curProvider.name][epNum]) > 0:
|
if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
|
||||||
neededEps.append(epNum)
|
needed_eps.append(ep_num)
|
||||||
else:
|
else:
|
||||||
notNeededEps.append(epNum)
|
not_needed_eps.append(ep_num)
|
||||||
|
|
||||||
-                logger.log(
-                    u"Single episode check result is needed episodes: " + str(neededEps) + ", not needed episodes: " + str(notNeededEps),
-                    logger.DEBUG)
+                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' %
+                           (needed_eps, not_needed_eps), logger.DEBUG)
|
|
||||||
if not notNeededEps:
|
if not not_needed_eps:
|
||||||
logger.log(u"All of these episodes were covered by single episode results, ignoring this multi episode result", logger.DEBUG)
|
logger.log(u'All of these episodes were covered by single episode results, ignoring this multi episode result', logger.DEBUG)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
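Whether a multi-episode result is worth keeping comes down to partitioning its episodes into those already covered by single results and those not; a small stand-alone version of that check, with clearer names than the snippet above:

```python
def partition_episodes(multi_result_eps, single_results):
    """Split a multi-episode result's episode numbers into already-covered and still-needed."""
    covered, needed = [], []
    for ep_num in multi_result_eps:
        # an episode counts as covered when a non-empty single-result list exists for it
        (needed, covered)[bool(single_results.get(ep_num))].append(ep_num)
    return covered, needed


singles = {1: ['Show.S01E01.720p'], 2: []}
covered, needed = partition_episodes([1, 2, 3], singles)
print(covered, needed)  # [1] [2, 3]
```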
# check if these eps are already covered by another multi-result
|
# check if these eps are already covered by another multi-result
|
||||||
multiNeededEps = []
|
multi_needed_eps = []
|
||||||
multiNotNeededEps = []
|
multi_not_needed_eps = []
|
||||||
for epObj in multiResult.episodes:
|
for ep_obj in multi_result.episodes:
|
||||||
epNum = epObj.episode
|
ep_num = ep_obj.episode
|
||||||
if epNum in multiResults:
|
if ep_num in multi_results:
|
||||||
multiNotNeededEps.append(epNum)
|
multi_not_needed_eps.append(ep_num)
|
||||||
else:
|
else:
|
||||||
multiNeededEps.append(epNum)
|
multi_needed_eps.append(ep_num)
|
||||||
|
|
||||||
-                logger.log(
-                    u"Multi episode check result is multi needed episodes: " + str(multiNeededEps) + ", multi not needed episodes: " + str(
-                        multiNotNeededEps), logger.DEBUG)
+                logger.log(u'Multi episode check result is... multi needed episodes: %s, multi not needed episodes: %s' %
+                           (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)
|
|
||||||
if not multiNeededEps:
|
if not multi_needed_eps:
|
||||||
-                    logger.log(
-                        u"All of these episodes were covered by another multi episode nzb, ignoring this multi episode result",
-                        logger.DEBUG)
+                    logger.log(u'All of these episodes were covered by another multi episode nzb, ignoring this multi episode result',
+                               logger.DEBUG)
continue
|
continue
|
||||||
|
|
||||||
# if we're keeping this multi-result then remember it
|
# if we're keeping this multi-result then remember it
|
||||||
for epObj in multiResult.episodes:
|
for ep_obj in multi_result.episodes:
|
||||||
multiResults[epObj.episode] = multiResult
|
multi_results[ep_obj.episode] = multi_result
|
||||||
|
|
||||||
# don't bother with the single result if we're going to get it with a multi result
|
# don't bother with the single result if we're going to get it with a multi result
|
||||||
for epObj in multiResult.episodes:
|
for ep_obj in multi_result.episodes:
|
||||||
epNum = epObj.episode
|
ep_num = ep_obj.episode
|
||||||
if epNum in foundResults[curProvider.name]:
|
if ep_num in found_results[provider_id]:
|
||||||
-                    logger.log(
-                        u"A needed multi episode result overlaps with a single episode result for episode #" + str(
-                            epNum) + ", removing the single episode results from the list", logger.DEBUG)
-                    del foundResults[curProvider.name][epNum]
+                    logger.log(u'A needed multi episode result overlaps with a single episode result for episode #%s, removing the single episode results from the list' %
+                               ep_num, logger.DEBUG)
+                    del found_results[provider_id][ep_num]
||||||
|
|
||||||
# of all the single ep results narrow it down to the best one for each episode
|
# of all the single ep results narrow it down to the best one for each episode
|
||||||
finalResults += set(multiResults.values())
|
final_results += set(multi_results.values())
|
||||||
for curEp in foundResults[curProvider.name]:
|
for cur_ep in found_results[provider_id]:
|
||||||
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
|
if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if len(foundResults[curProvider.name][curEp]) == 0:
|
if 0 == len(found_results[provider_id][cur_ep]):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
bestResult = pickBestResult(foundResults[curProvider.name][curEp], show)
|
best_result = pick_best_result(found_results[provider_id][cur_ep], show)
|
||||||
|
|
||||||
# if all results were rejected move on to the next episode
|
# if all results were rejected move on to the next episode
|
||||||
if not bestResult:
|
if not best_result:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# filter out possible bad torrents from providers
|
# filter out possible bad torrents from providers
|
||||||
if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
|
if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
|
||||||
bestResult.content = None
|
best_result.content = None
|
||||||
if not bestResult.url.startswith('magnet'):
|
if not best_result.url.startswith('magnet'):
|
||||||
bestResult.content = bestResult.provider.get_url(bestResult.url)
|
best_result.content = best_result.provider.get_url(best_result.url)
|
||||||
if not bestResult.content:
|
if not best_result.content:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# add result if its not a duplicate and
|
# add result if its not a duplicate and
|
||||||
found = False
|
found = False
|
||||||
for i, result in enumerate(finalResults):
|
for i, result in enumerate(final_results):
|
||||||
for bestResultEp in bestResult.episodes:
|
for best_result_ep in best_result.episodes:
|
||||||
if bestResultEp in result.episodes:
|
if best_result_ep in result.episodes:
|
||||||
if result.quality < bestResult.quality:
|
if best_result.quality > result.quality:
|
||||||
finalResults.pop(i)
|
final_results.pop(i)
|
||||||
else:
|
else:
|
||||||
found = True
|
found = True
|
||||||
if not found:
|
if not found:
|
||||||
finalResults += [bestResult]
|
final_results += [best_result]
|
||||||
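Before a best result is appended, the list of final results is scanned for anything already covering one of its episodes and only the higher quality of the two is kept; a compact sketch of that de-duplication with results modelled as plain dicts:

```python
def add_best(final_results, candidate):
    """Append candidate unless a kept result already covers one of its episodes at equal or better quality."""
    for i, result in enumerate(final_results):
        if set(result['episodes']) & set(candidate['episodes']):
            if candidate['quality'] > result['quality']:
                final_results.pop(i)  # candidate wins, drop the weaker overlapping result
                break
            return                    # an existing overlapping result is at least as good
    final_results.append(candidate)


final = [{'name': 'Show.S01E01.HDTV', 'episodes': [1], 'quality': 2}]
add_best(final, {'name': 'Show.S01E01.720p', 'episodes': [1], 'quality': 4})
print([r['name'] for r in final])  # ['Show.S01E01.720p']
```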
|
|
||||||
# check that we got all the episodes we wanted first before doing a match and snatch
|
# check that we got all the episodes we wanted first before doing a match and snatch
|
||||||
wantedEpCount = 0
|
wanted_ep_count = 0
|
||||||
for wantedEp in episodes:
|
for wanted_ep in episodes:
|
||||||
for result in finalResults:
|
for result in final_results:
|
||||||
if wantedEp in result.episodes and isFinalResult(result):
|
if wanted_ep in result.episodes and is_final_result(result):
|
||||||
wantedEpCount += 1
|
wanted_ep_count += 1
|
||||||
|
|
||||||
# make sure we search every provider for results unless we found everything we wanted
|
# make sure we search every provider for results unless we found everything we wanted
|
||||||
if wantedEpCount == len(episodes):
|
if len(episodes) == wanted_ep_count:
|
||||||
break
|
break
|
||||||
|
|
||||||
if not didSearch:
|
if not search_done:
|
||||||
logger.log(u"No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.",
|
logger.log(u'No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.',
|
||||||
logger.ERROR)
|
logger.ERROR)
|
||||||
|
|
||||||
return finalResults
|
return final_results
|
||||||
|
|
|
@ -28,7 +28,7 @@ from sickbeard import search_queue
|
||||||
from sickbeard import logger
|
from sickbeard import logger
|
||||||
from sickbeard import ui
|
from sickbeard import ui
|
||||||
from sickbeard import common
|
from sickbeard import common
|
||||||
from sickbeard.search import wantedEpisodes
|
from sickbeard.search import wanted_episodes
|
||||||
|
|
||||||
NORMAL_BACKLOG = 0
|
NORMAL_BACKLOG = 0
|
||||||
LIMITED_BACKLOG = 10
|
LIMITED_BACKLOG = 10
|
||||||
|
@ -75,7 +75,7 @@ class BacklogSearcher:
|
||||||
logger.log(u'amWaiting: ' + str(self.amWaiting) + ', amActive: ' + str(self.amActive), logger.DEBUG)
|
logger.log(u'amWaiting: ' + str(self.amWaiting) + ', amActive: ' + str(self.amActive), logger.DEBUG)
|
||||||
return (not self.amWaiting) and self.amActive
|
return (not self.amWaiting) and self.amActive
|
||||||
|
|
||||||
def searchBacklog(self, which_shows=None, force_type=NORMAL_BACKLOG):
|
def search_backlog(self, which_shows=None, force_type=NORMAL_BACKLOG):
|
||||||
|
|
||||||
if self.amActive:
|
if self.amActive:
|
||||||
logger.log(u'Backlog is still running, not starting it again', logger.DEBUG)
|
logger.log(u'Backlog is still running, not starting it again', logger.DEBUG)
|
||||||
|
@ -112,7 +112,7 @@ class BacklogSearcher:
|
||||||
if curShow.paused:
|
if curShow.paused:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
segments = wantedEpisodes(curShow, fromDate, make_dict=True)
|
segments = wanted_episodes(curShow, fromDate, make_dict=True)
|
||||||
|
|
||||||
for season, segment in segments.items():
|
for season, segment in segments.items():
|
||||||
self.currentSearchInfo = {'title': curShow.name + ' Season ' + str(season)}
|
self.currentSearchInfo = {'title': curShow.name + ' Season ' + str(season)}
|
||||||
|
@ -166,7 +166,7 @@ class BacklogSearcher:
|
||||||
try:
|
try:
|
||||||
force_type = self.forcetype
|
force_type = self.forcetype
|
||||||
self.forcetype = NORMAL_BACKLOG
|
self.forcetype = NORMAL_BACKLOG
|
||||||
self.searchBacklog(force_type=force_type)
|
self.search_backlog(force_type=force_type)
|
||||||
except:
|
except:
|
||||||
self.amActive = False
|
self.amActive = False
|
||||||
raise
|
raise
|
|
@ -19,11 +19,10 @@
|
||||||
from __future__ import with_statement
|
from __future__ import with_statement
|
||||||
|
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
import sickbeard
|
import sickbeard
|
||||||
|
|
||||||
|
|
||||||
class ProperSearcher():
|
class ProperSearcher:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.lock = threading.Lock()
|
self.lock = threading.Lock()
|
||||||
self.amActive = False
|
self.amActive = False
|
||||||
|
@ -35,4 +34,4 @@ class ProperSearcher():
|
||||||
propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem()
|
propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem()
|
||||||
sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item)
|
sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item)
|
||||||
|
|
||||||
self.amActive = False
|
self.amActive = False
|
|
@ -26,7 +26,7 @@ import datetime
|
||||||
import sickbeard
|
import sickbeard
|
||||||
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
|
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
|
||||||
failed_history, history, ui, properFinder
|
failed_history, history, ui, properFinder
|
||||||
from sickbeard.search import wantedEpisodes
|
from sickbeard.search import wanted_episodes
|
||||||
|
|
||||||
|
|
||||||
search_queue_lock = threading.Lock()
|
search_queue_lock = threading.Lock()
|
||||||
|
@ -40,6 +40,7 @@ PROPER_SEARCH = 50
|
||||||
MANUAL_SEARCH_HISTORY = []
|
MANUAL_SEARCH_HISTORY = []
|
||||||
MANUAL_SEARCH_HISTORY_SIZE = 100
|
MANUAL_SEARCH_HISTORY_SIZE = 100
|
||||||
|
|
||||||
|
|
||||||
class SearchQueue(generic_queue.GenericQueue):
|
class SearchQueue(generic_queue.GenericQueue):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
generic_queue.GenericQueue.__init__(self)
|
generic_queue.GenericQueue.__init__(self)
|
||||||
|
@ -58,14 +59,14 @@ class SearchQueue(generic_queue.GenericQueue):
|
||||||
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
|
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def is_show_in_queue(self, show):
|
def is_show_in_queue(self, show):
|
||||||
with self.lock:
|
with self.lock:
|
||||||
for cur_item in self.queue:
|
for cur_item in self.queue:
|
||||||
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.show.indexerid == show:
|
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.show.indexerid == show:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def get_all_ep_from_queue(self, show):
|
def get_all_ep_from_queue(self, show):
|
||||||
with self.lock:
|
with self.lock:
|
||||||
ep_obj_list = []
|
ep_obj_list = []
|
||||||
|
@ -76,7 +77,7 @@ class SearchQueue(generic_queue.GenericQueue):
|
||||||
if ep_obj_list:
|
if ep_obj_list:
|
||||||
return ep_obj_list
|
return ep_obj_list
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def pause_backlog(self):
|
def pause_backlog(self):
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self.min_priority = generic_queue.QueuePriorities.HIGH
|
self.min_priority = generic_queue.QueuePriorities.HIGH
|
||||||
|
@ -90,10 +91,10 @@ class SearchQueue(generic_queue.GenericQueue):
|
||||||
with self.lock:
|
with self.lock:
|
||||||
return self.min_priority >= generic_queue.QueuePriorities.NORMAL
|
return self.min_priority >= generic_queue.QueuePriorities.NORMAL
|
||||||
|
|
||||||
def _is_in_progress(self, itemType):
|
def _is_in_progress(self, item_type):
|
||||||
with self.lock:
|
with self.lock:
|
||||||
for cur_item in self.queue + [self.currentItem]:
|
for cur_item in self.queue + [self.currentItem]:
|
||||||
if isinstance(cur_item, itemType):
|
if isinstance(cur_item, item_type):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -108,7 +109,7 @@ class SearchQueue(generic_queue.GenericQueue):
|
||||||
return self._is_in_progress(RecentSearchQueueItem)
|
return self._is_in_progress(RecentSearchQueueItem)
|
||||||
|
|
||||||
def is_propersearch_in_progress(self):
|
def is_propersearch_in_progress(self):
|
||||||
return self._is_in_progress(ProperSearchQueueItem)
|
return self._is_in_progress(ProperSearchQueueItem)
|
||||||
|
|
||||||
def is_standard_backlog_in_progress(self):
|
def is_standard_backlog_in_progress(self):
|
||||||
with self.lock:
|
with self.lock:
|
||||||
|
@ -155,7 +156,6 @@ class SearchQueue(generic_queue.GenericQueue):
|
||||||
length['failed'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
|
length['failed'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
|
||||||
return length
|
return length
|
||||||
|
|
||||||
|
|
||||||
def add_item(self, item):
|
def add_item(self, item):
|
||||||
if isinstance(item, (RecentSearchQueueItem, ProperSearchQueueItem)):
|
if isinstance(item, (RecentSearchQueueItem, ProperSearchQueueItem)):
|
||||||
# recent and proper searches
|
# recent and proper searches
|
||||||
|
@ -185,12 +185,12 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
|
||||||
self.update_providers()
|
self.update_providers()
|
||||||
|
|
||||||
show_list = sickbeard.showList
|
show_list = sickbeard.showList
|
||||||
fromDate = datetime.date.fromordinal(1)
|
from_date = datetime.date.fromordinal(1)
|
||||||
for curShow in show_list:
|
for curShow in show_list:
|
||||||
if curShow.paused:
|
if curShow.paused:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
self.episodes.extend(wantedEpisodes(curShow, fromDate))
|
self.episodes.extend(wanted_episodes(curShow, from_date))
|
||||||
|
|
||||||
if not self.episodes:
|
if not self.episodes:
|
||||||
logger.log(u'No search of cache for episodes required')
|
logger.log(u'No search of cache for episodes required')
|
||||||
|
@ -203,7 +203,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
logger.log(u'Beginning recent search for episodes')
|
logger.log(u'Beginning recent search for episodes')
|
||||||
found_results = search.searchForNeededEpisodes(self.episodes)
|
found_results = search.search_for_needed_episodes(self.episodes)
|
||||||
|
|
||||||
if not len(found_results):
|
if not len(found_results):
|
||||||
logger.log(u'No needed episodes found')
|
logger.log(u'No needed episodes found')
|
||||||
|
@ -211,7 +211,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
|
||||||
for result in found_results:
|
for result in found_results:
|
||||||
# just use the first result for now
|
# just use the first result for now
|
||||||
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
|
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
|
||||||
self.success = search.snatchEpisode(result)
|
self.success = search.snatch_episode(result)
|
||||||
|
|
||||||
# give the CPU a break
|
# give the CPU a break
|
||||||
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
||||||
|
@ -219,7 +219,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||||
|
|
||||||
if self.success is None:
|
if None is self.success:
|
||||||
self.success = False
|
self.success = False
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
|
@ -231,23 +231,23 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
|
||||||
network_timezones.update_network_dict()
|
network_timezones.update_network_dict()
|
||||||
|
|
||||||
if network_timezones.network_dict:
|
if network_timezones.network_dict:
|
||||||
curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
|
cur_date = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
|
||||||
else:
|
else:
|
||||||
curDate = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()
|
cur_date = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()
|
||||||
|
|
||||||
curTime = datetime.datetime.now(network_timezones.sb_timezone)
|
cur_time = datetime.datetime.now(network_timezones.sb_timezone)
|
||||||
|
|
||||||
myDB = db.DBConnection()
|
my_db = db.DBConnection()
|
||||||
sqlResults = myDB.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?',
|
sql_results = my_db.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?',
|
||||||
[common.UNAIRED, curDate])
|
[common.UNAIRED, cur_date])
|
||||||
|
|
||||||
sql_l = []
|
sql_l = []
|
||||||
show = None
|
show = None
|
||||||
wanted = False
|
wanted = False
|
||||||
|
|
||||||
for sqlEp in sqlResults:
|
for sqlEp in sql_results:
|
||||||
try:
|
try:
|
||||||
if not show or int(sqlEp['showid']) != show.indexerid:
|
if not show or show.indexerid != int(sqlEp['showid']):
|
||||||
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp['showid']))
|
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp['showid']))
|
||||||
|
|
||||||
# for when there is orphaned series in the database but not loaded into our showlist
|
# for when there is orphaned series in the database but not loaded into our showlist
|
||||||
|
@ -255,13 +255,13 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
except exceptions.MultipleShowObjectsException:
|
except exceptions.MultipleShowObjectsException:
|
||||||
logger.log(u'ERROR: expected to find a single show matching ' + str(sqlEp['showid']))
|
logger.log(u'ERROR: expected to find a single show matching %s' % sqlEp['showid'])
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
|
end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
|
||||||
# filter out any episodes that haven't aired yet
|
# filter out any episodes that haven't aired yet
|
||||||
if end_time > curTime:
|
if end_time > cur_time:
|
||||||
continue
|
continue
|
||||||
except:
|
except:
|
||||||
# if an error occurred assume the episode hasn't aired yet
|
# if an error occurred assume the episode hasn't aired yet
|
||||||
|
@ -279,23 +279,23 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
|
||||||
logger.log(u'No unaired episodes marked wanted')
|
logger.log(u'No unaired episodes marked wanted')
|
||||||
|
|
||||||
if 0 < len(sql_l):
|
if 0 < len(sql_l):
|
||||||
myDB = db.DBConnection()
|
my_db = db.DBConnection()
|
||||||
myDB.mass_action(sql_l)
|
my_db.mass_action(sql_l)
|
||||||
if wanted:
|
if wanted:
|
||||||
logger.log(u'Found new episodes marked wanted')
|
logger.log(u'Found new episodes marked wanted')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def update_providers():
|
def update_providers():
|
||||||
origThreadName = threading.currentThread().name
|
orig_thread_name = threading.currentThread().name
|
||||||
threads = []
|
threads = []
|
||||||
|
|
||||||
logger.log('Updating provider caches with recent upload data')
|
logger.log('Updating provider caches with recent upload data')
|
||||||
|
|
||||||
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
|
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
|
||||||
for curProvider in providers:
|
for cur_provider in providers:
|
||||||
# spawn separate threads for each provider so we don't need to wait for providers with slow network operation
|
# spawn separate threads for each provider so we don't need to wait for providers with slow network operation
|
||||||
-            threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
-                                            ' :: [' + curProvider.name + ']'))
+            threads.append(threading.Thread(target=cur_provider.cache.updateCache,
+                                            name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
||||||
# start the thread we just created
|
# start the thread we just created
|
||||||
threads[-1].start()
|
threads[-1].start()
|
||||||
|
|
||||||
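Each provider cache refresh runs on its own thread so one slow tracker does not hold up the rest; a self-contained sketch of that fan-out, with a dummy updater standing in for `cache.updateCache` and an explicit join added for the example:

```python
import threading
import time


def update_cache(provider_name):
    # stand-in for curProvider.cache.updateCache(): pretend to hit the provider
    time.sleep(0.1)
    print('updated %s' % provider_name)


providers = ['ProviderA', 'ProviderB', 'ProviderC']
threads = []
for name in providers:
    # one worker per provider, named the same way the real code names its threads
    threads.append(threading.Thread(target=update_cache, args=(name,),
                                    name='MAIN :: [%s]' % name))
    threads[-1].start()

for thread in threads:
    thread.join()  # wait for every provider refresh to finish
```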
|
@ -316,7 +316,7 @@ class ProperSearchQueueItem(generic_queue.QueueItem):
|
||||||
generic_queue.QueueItem.run(self)
|
generic_queue.QueueItem.run(self)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
properFinder.searchPropers()
|
properFinder.search_propers()
|
||||||
finally:
|
finally:
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
|
@ -325,7 +325,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
|
||||||
def __init__(self, show, segment):
|
def __init__(self, show, segment):
|
||||||
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
|
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
|
||||||
self.priority = generic_queue.QueuePriorities.HIGH
|
self.priority = generic_queue.QueuePriorities.HIGH
|
||||||
self.name = 'MANUAL-' + str(show.indexerid)
|
self.name = 'MANUAL-%s' % show.indexerid
|
||||||
self.success = None
|
self.success = None
|
||||||
self.show = show
|
self.show = show
|
||||||
self.segment = segment
|
self.segment = segment
|
||||||
|
@ -335,30 +335,30 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
|
||||||
generic_queue.QueueItem.run(self)
|
generic_queue.QueueItem.run(self)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
logger.log('Beginning manual search for: [' + self.segment.prettyName() + ']')
|
logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
|
||||||
self.started = True
|
self.started = True
|
||||||
|
|
||||||
-            searchResult = search.searchProviders(self.show, [self.segment], True)
-
-            if searchResult:
+            search_result = search.search_providers(self.show, [self.segment], True)
+
+            if search_result:
# just use the first result for now
|
# just use the first result for now
|
||||||
logger.log(u'Downloading ' + searchResult[0].name + ' from ' + searchResult[0].provider.name)
|
logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
|
||||||
self.success = search.snatchEpisode(searchResult[0])
|
self.success = search.snatch_episode(search_result[0])
|
||||||
|
|
||||||
# give the CPU a break
|
# give the CPU a break
|
||||||
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
||||||
|
|
||||||
else:
|
else:
|
||||||
ui.notifications.message('No downloads were found',
|
ui.notifications.message('No downloads found',
|
||||||
'Couldn\'t find a download for <i>%s</i>' % self.segment.prettyName())
|
u'Could not find a download for <i>%s</i>' % self.segment.prettyName())
|
||||||
|
|
||||||
logger.log(u'Unable to find a download for: [' + self.segment.prettyName() + ']')
|
logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
### Keep a list with the 100 last executed searches
|
# Keep a list with the 100 last executed searches
|
||||||
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
|
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
|
||||||
|
|
||||||
if self.success is None:
|
if self.success is None:
|
||||||
|
@ -371,7 +371,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
|
||||||
def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False):
|
def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False):
|
||||||
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
|
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
|
||||||
self.priority = generic_queue.QueuePriorities.LOW
|
self.priority = generic_queue.QueuePriorities.LOW
|
||||||
self.name = 'BACKLOG-' + str(show.indexerid)
|
self.name = 'BACKLOG-%s' % show.indexerid
|
||||||
self.success = None
|
self.success = None
|
||||||
self.show = show
|
self.show = show
|
||||||
self.segment = segment
|
self.segment = segment
|
||||||
|
@ -383,19 +383,19 @@ class BacklogQueueItem(generic_queue.QueueItem):
|
||||||
generic_queue.QueueItem.run(self)
|
generic_queue.QueueItem.run(self)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
logger.log('Beginning backlog search for: [' + self.show.name + ']')
|
logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
|
||||||
searchResult = search.searchProviders(self.show, self.segment, False)
|
search_result = search.search_providers(self.show, self.segment, False)
|
||||||
|
|
||||||
if searchResult:
|
if search_result:
|
||||||
for result in searchResult:
|
for result in search_result:
|
||||||
# just use the first result for now
|
# just use the first result for now
|
||||||
logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
|
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
|
||||||
search.snatchEpisode(result)
|
search.snatch_episode(result)
|
||||||
|
|
||||||
# give the CPU a break
|
# give the CPU a break
|
||||||
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
||||||
else:
|
else:
|
||||||
logger.log(u'No needed episodes found during backlog search for: [' + self.show.name + ']')
|
logger.log(u'No needed episodes found during backlog search for: [%s]' % self.show.name)
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||||
|
|
||||||
|
@ -407,7 +407,7 @@ class FailedQueueItem(generic_queue.QueueItem):
|
||||||
def __init__(self, show, segment):
|
def __init__(self, show, segment):
|
||||||
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
|
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
|
||||||
self.priority = generic_queue.QueuePriorities.HIGH
|
self.priority = generic_queue.QueuePriorities.HIGH
|
||||||
self.name = 'RETRY-' + str(show.indexerid)
|
self.name = 'RETRY-%s' % show.indexerid
|
||||||
self.show = show
|
self.show = show
|
||||||
self.segment = segment
|
self.segment = segment
|
||||||
self.success = None
|
self.success = None
|
||||||
|
@ -416,48 +416,49 @@ class FailedQueueItem(generic_queue.QueueItem):
|
||||||
def run(self):
|
def run(self):
|
||||||
generic_queue.QueueItem.run(self)
|
generic_queue.QueueItem.run(self)
|
||||||
self.started = True
|
self.started = True
|
||||||
|
|
||||||
try:
|
try:
|
||||||
for epObj in self.segment:
|
for epObj in self.segment:
|
||||||
|
|
||||||
logger.log(u'Marking episode as bad: [' + epObj.prettyName() + ']')
|
logger.log(u'Marking episode as bad: [%s]' % epObj.prettyName())
|
||||||
|
|
||||||
failed_history.markFailed(epObj)
|
failed_history.markFailed(epObj)
|
||||||
|
|
||||||
(release, provider) = failed_history.findRelease(epObj)
|
(release, provider) = failed_history.findRelease(epObj)
|
||||||
if release:
|
if release:
|
||||||
failed_history.logFailed(release)
|
failed_history.logFailed(release)
|
||||||
history.logFailed(epObj, release, provider)
|
history.logFailed(epObj, release, provider)
|
||||||
|
|
||||||
failed_history.revertEpisode(epObj)
|
failed_history.revertEpisode(epObj)
|
||||||
-                logger.log('Beginning failed download search for: [' + epObj.prettyName() + ']')
+                logger.log(u'Beginning failed download search for: [%s]' % epObj.prettyName())
||||||
|
|
||||||
searchResult = search.searchProviders(self.show, self.segment, True)
|
search_result = search.search_providers(self.show, self.segment, True)
|
||||||
|
|
||||||
if searchResult:
|
if search_result:
|
||||||
for result in searchResult:
|
for result in search_result:
|
||||||
# just use the first result for now
|
# just use the first result for now
|
||||||
logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
|
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
|
||||||
search.snatchEpisode(result)
|
search.snatch_episode(result)
|
||||||
|
|
||||||
# give the CPU a break
|
# give the CPU a break
|
||||||
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
|
||||||
else:
|
else:
|
||||||
pass
|
pass
|
||||||
#logger.log(u"No valid episode found to retry for: [" + self.segment.prettyName() + "]")
|
# logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
### Keep a list with the 100 last executed searches
|
# Keep a list with the 100 last executed searches
|
||||||
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
|
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
|
||||||
|
|
||||||
if self.success is None:
|
if self.success is None:
|
||||||
self.success = False
|
self.success = False
|
||||||
|
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
-def fifo(myList, item, maxSize = 100):
-    if len(myList) >= maxSize:
-        myList.pop(0)
-    myList.append(item)
+def fifo(my_list, item, max_size=100):
+    if len(my_list) >= max_size:
+        my_list.pop(0)
+    my_list.append(item)
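The helper keeps `MANUAL_SEARCH_HISTORY` bounded to the most recent entries; a quick usage sketch, and note that `collections.deque(maxlen=...)` from the standard library gives the same behaviour:

```python
def fifo(my_list, item, max_size=100):
    # drop the oldest entry once the cap is reached, then append the newest
    if len(my_list) >= max_size:
        my_list.pop(0)
    my_list.append(item)


history = []
for item in range(5):
    fifo(history, item, max_size=3)  # keep at most the three most recent entries
print(history)  # [2, 3, 4]
```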
||||||
|
|
|
@ -19,7 +19,6 @@
|
||||||
from __future__ import with_statement
|
from __future__ import with_statement
|
||||||
|
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
import sickbeard
|
import sickbeard
|
||||||
|
|
||||||
|
|
||||||
|
@ -35,4 +34,4 @@ class RecentSearcher():
|
||||||
recentsearch_queue_item = sickbeard.search_queue.RecentSearchQueueItem()
|
recentsearch_queue_item = sickbeard.search_queue.RecentSearchQueueItem()
|
||||||
sickbeard.searchQueueScheduler.action.add_item(recentsearch_queue_item)
|
sickbeard.searchQueueScheduler.action.add_item(recentsearch_queue_item)
|
||||||
|
|
||||||
self.amActive = False
|
self.amActive = False
|
|
@ -29,56 +29,101 @@ from sickbeard import logger
 from sickbeard import db
 from sickbeard import encodingKludge as ek
 from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-from lib.unidecode import unidecode
-from sickbeard.blackandwhitelist import BlackAndWhiteList

|
|
||||||
def filterBadReleases(name, parse=True):
|
def pass_wordlist_checks(name, parse=True):
|
||||||
"""
|
"""
|
||||||
Filters out non-english and just all-around stupid releases by comparing them
|
Filters out non-english and just all-around stupid releases by comparing
|
||||||
to the resultFilters contents.
|
the word list contents at boundaries or the end of name.
|
||||||
|
|
||||||
name: the release name to check
|
name: the release name to check
|
||||||
|
|
||||||
Returns: True if the release name is OK, False if it's bad.
|
Returns: True if the release name is OK, False if it's bad.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
try:
|
if parse:
|
||||||
if parse:
|
err_msg = u'Unable to parse the filename %s into a valid ' % name
|
||||||
|
try:
|
||||||
NameParser().parse(name)
|
NameParser().parse(name)
|
||||||
except InvalidNameException:
|
except InvalidNameException:
|
||||||
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
|
logger.log(err_msg + 'episode', logger.DEBUG)
|
||||||
return False
|
return False
|
||||||
except InvalidShowException:
|
except InvalidShowException:
|
||||||
logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
|
logger.log(err_msg + 'show', logger.DEBUG)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
resultFilters = ['sub(bed|ed|pack|s)', '(dk|fin|heb|kor|nor|nordic|pl|swe)sub(bed|ed|s)?',
|
word_list = ['sub(bed|ed|pack|s)', '(dk|fin|heb|kor|nor|nordic|pl|swe)sub(bed|ed|s)?',
|
||||||
'(dir|sample|sub|nfo)fix', 'sample', '(dvd)?extras',
|
'(dir|sample|sub|nfo)fix', 'sample', '(dvd)?extras',
|
||||||
'dub(bed)?']
|
'dub(bed)?']
|
||||||
|
|
||||||
# if any of the bad strings are in the name then say no
|
# if any of the bad strings are in the name then say no
|
||||||
if sickbeard.IGNORE_WORDS:
|
if sickbeard.IGNORE_WORDS:
|
||||||
resultFilters.extend(sickbeard.IGNORE_WORDS.split(','))
|
word_list = ','.join([sickbeard.IGNORE_WORDS] + word_list)
|
||||||
filters = [re.compile('(^|[\W_])%s($|[\W_])' % re.escape(filter.strip()), re.I) for filter in resultFilters]
|
|
||||||
for regfilter in filters:
|
result = contains_any(name, word_list)
|
||||||
if regfilter.search(name):
|
if None is not result and result:
|
||||||
logger.log(u"Invalid scene release: " + name + " contained: " + regfilter.pattern + ", ignoring it",
|
logger.log(u'Ignored: %s for containing ignore word' % name, logger.DEBUG)
|
||||||
logger.DEBUG)
|
return False
|
||||||
return False
|
|
||||||
|
|
||||||
# if any of the good strings aren't in the name then say no
|
# if any of the good strings aren't in the name then say no
|
||||||
if sickbeard.REQUIRE_WORDS:
|
result = not_contains_any(name, sickbeard.REQUIRE_WORDS)
|
||||||
require_words = sickbeard.REQUIRE_WORDS.split(',')
|
if None is not result and result:
|
||||||
filters = [re.compile('(^|[\W_])%s($|[\W_])' % re.escape(filter.strip()), re.I) for filter in require_words]
|
logger.log(u'Ignored: %s for not containing required word match' % name, logger.DEBUG)
|
||||||
for regfilter in filters:
|
return False
|
||||||
if not regfilter.search(name):
|
|
||||||
logger.log(u"Invalid scene release: " + name + " didn't contain: " + regfilter.pattern + ", ignoring it",
|
|
||||||
logger.DEBUG)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
+def not_contains_any(subject, lookup_words, **kwargs):
+
+    return contains_any(subject, lookup_words, invert=True, **kwargs)
+
+
+def contains_any(subject, lookup_words, invert=False, **kwargs):
+    """
+    Check if subject does or does not contain a match from a list or string of regular expression lookup words
+
+    subject: text to check for the lookup words
+    lookup_words: List or comma separated string of words to search
+    re_prefix: insert string to all lookup words
+    re_suffix: append string to all lookup words
+    invert: invert function logic "contains any" into "does not contain any"
+
+    Returns: None if no checking was done. True for first match found, or if invert is True,
+             then True for first pattern that does not match, or False
+    """
+    compiled_words = compile_word_list(lookup_words, **kwargs)
+    if subject and compiled_words:
+        for rc_filter in compiled_words:
+            match = rc_filter.search(subject)
+            if (match and not invert) or (not match and invert):
+                msg = match and not invert and 'Found match' or ''
+                msg = not match and invert and 'No match found' or msg
+                logger.log(u'%s from pattern: %s in text: %s ' % (msg, rc_filter.pattern, subject), logger.DEBUG)
+                return True
+        return False
+    return None
+
+
+def compile_word_list(lookup_words, re_prefix='(^|[\W_])', re_suffix='($|[\W_])'):
+
+    result = []
+    if lookup_words:
+        search_raw = isinstance(lookup_words, list)
+        if not search_raw:
+            search_raw = not lookup_words.startswith('regex:')
+            lookup_words = lookup_words[(6, 0)[search_raw]:].split(',')
+        lookup_words = [x.strip() for x in lookup_words]
+        for word in [x for x in lookup_words if x]:
+            try:
+                # !0 == regex and subject = s / 'what\'s the "time"' / what\'s\ the\ \"time\"
+                subject = search_raw and re.escape(word) or re.sub(r'([\" \'])', r'\\\1', word)
+                result.append(re.compile('(?i)%s%s%s' % (re_prefix, subject, re_suffix)))
+            except Exception as e:
+                logger.log(u'Failure to compile filter expression: %s ... Reason: %s' % (word, e.message), logger.DEBUG)
+
+        diff = len(lookup_words) - len(result)
+        if diff:
+            logger.log(u'From %s expressions, %s was discarded during compilation' % (len(lookup_words), diff), logger.DEBUG)
+
+    return result

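The word-boundary matching these helpers rely on comes from wrapping each word in `(^|[\W_])` and `($|[\W_])`, with a leading `regex:` switching a comma-separated string from escaped literals to raw patterns; a minimal stand-alone illustration of just that pattern construction (not the full helpers):

```python
import re


def boundary_patterns(lookup_words, re_prefix=r'(^|[\W_])', re_suffix=r'($|[\W_])'):
    """Compile comma separated words into boundary-anchored, case-insensitive patterns."""
    raw = not lookup_words.startswith('regex:')
    words = lookup_words[(6, 0)[raw]:].split(',')  # strip the 'regex:' prefix when present
    return [re.compile('(?i)%s%s%s' % (re_prefix, re.escape(w.strip()) if raw else w.strip(), re_suffix))
            for w in words if w.strip()]


name = 'Some.Show.S01E01.720p.HDTV.x264-GRP'
print(any(p.search(name) for p in boundary_patterns('dubbed,subpack')))  # False: no ignore word present
print(any(p.search(name) for p in boundary_patterns('hdtv')))            # True: matched at a word boundary
print(any(p.search(name) for p in boundary_patterns('regex:x26[45]')))   # True: raw regular expression form
```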
def makeSceneShowSearchStrings(show, season=-1):
|
def makeSceneShowSearchStrings(show, season=-1):
|
||||||
showNames = allPossibleShowNames(show, season=season)
|
showNames = allPossibleShowNames(show, season=season)
|
||||||
|
@ -195,9 +240,9 @@ def allPossibleShowNames(show, season=-1):
|
||||||
"""
|
"""
|
||||||
Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name,
|
Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name,
|
||||||
country codes on the end, eg. "Show Name (AU)", and any scene exception names.
|
country codes on the end, eg. "Show Name (AU)", and any scene exception names.
|
||||||
|
|
||||||
show: a TVShow object that we should get the names of
|
show: a TVShow object that we should get the names of
|
||||||
|
|
||||||
Returns: a list of all the possible show names
|
Returns: a list of all the possible show names
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@@ -256,13 +301,13 @@ def determineReleaseName(dir_name=None, nzb_name=None):
         if len(results) == 1:
             found_file = ek.ek(os.path.basename, results[0])
             found_file = found_file.rpartition('.')[0]
-            if filterBadReleases(found_file):
+            if pass_wordlist_checks(found_file):
                 logger.log(u"Release name (" + found_file + ") found from file (" + results[0] + ")")
                 return found_file.rpartition('.')[0]

     # If that fails, we try the folder
     folder = ek.ek(os.path.basename, dir_name)
-    if filterBadReleases(folder):
+    if pass_wordlist_checks(folder):
         # NOTE: Multiple failed downloads will change the folder name.
         # (e.g., appending #s)
         # Should we handle that?

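These call sites now route through the renamed pass_wordlist_checks. As a rough sketch only (the real function also filters known junk release patterns and can optionally parse-validate the name, as its parse argument suggests), its ignore/require handling behaves roughly like this, built from the new helpers and the globals that the added unit tests set:

```python
# Sketch only, not the code from this patch: the ignore/require part of the
# renamed check, expressed with the new helpers and the globals the added
# unit tests use (sickbeard.IGNORE_WORDS / sickbeard.REQUIRE_WORDS).
import sickbeard
from sickbeard import show_name_helpers

def wordlist_gate_sketch(name):
    # reject when any ignore word (or 'regex:' pattern) matches the name
    if show_name_helpers.contains_any(name, sickbeard.IGNORE_WORDS):
        return False
    # reject when a require list is set and none of its words match;
    # not_contains_any returns None (falsy) when the list is empty
    if show_name_helpers.not_contains_any(name, sickbeard.REQUIRE_WORDS):
        return False
    return True
```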
@@ -139,7 +139,7 @@ class ShowQueue(generic_queue.GenericQueue):

         if ((not after_update and self.isBeingUpdated(show)) or self.isInUpdateQueue(show)) and not force:
             logger.log(
-                u'A refresh was attempted but there is already an update queued or in progress. Since updates do a refresh at the end anyway I\'m skipping this request.',
+                u'Skipping this refresh as there is already an update queued or in progress and a refresh is done at the end of an update anyway.',
                 logger.DEBUG)
             return

@@ -449,7 +449,7 @@ class QueueItemAdd(ShowQueueItem):
         # if started with WANTED eps then run the backlog
         if WANTED == self.default_status or items_wanted:
             logger.log(u'Launching backlog for this show since episodes are WANTED')
-            sickbeard.backlogSearchScheduler.action.searchBacklog([self.show])  #@UndefinedVariable
+            sickbeard.backlogSearchScheduler.action.search_backlog([self.show])  #@UndefinedVariable
             ui.notifications.message('Show added/search', 'Adding and searching for episodes of' + msg)
         else:
             ui.notifications.message('Show added', 'Adding' + msg)

@@ -655,4 +655,4 @@ class QueueItemForceUpdateWeb(QueueItemUpdate):
     def __init__(self, show=None, scheduled_update=False):
         ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show, scheduled_update)
         self.force = True
         self.force_web = True

@@ -2156,14 +2156,14 @@ class TVEpisode(object):
         def us(name):
             return re.sub('[ -]', '_', name)

-        def release_name(show, name):
+        def release_name(name, is_anime=False):
             if name:
-                name = helpers.remove_non_release_groups(helpers.remove_extension(name), show.anime)
+                name = helpers.remove_non_release_groups(helpers.remove_extension(name), is_anime)
             return name

         def release_group(show, name):
             if name:
-                name = helpers.remove_non_release_groups(helpers.remove_extension(name), show.anime)
+                name = helpers.remove_non_release_groups(helpers.remove_extension(name), show.is_anime)
             else:
                 return ""

@@ -2205,7 +2205,7 @@ class TVEpisode(object):
             '%0XE': '%02d' % self.scene_episode,
             '%AB': '%(#)03d' % {'#': self.absolute_number},
             '%XAB': '%(#)03d' % {'#': self.scene_absolute_number},
-            '%RN': release_name(self.show, self.release_name),
+            '%RN': release_name(self.release_name, self.show.is_anime),
             '%RG': release_group(self.show, self.release_name),
             '%AD': str(self.airdate).replace('-', ' '),
             '%A.D': str(self.airdate).replace('-', '.'),

@@ -50,7 +50,7 @@ class TVCache:
         self.provider = provider
         self.providerID = self.provider.get_id()
         self.providerDB = None
-        self.minTime = 10
+        self.update_freq = 10

     def get_db(self):
         return CacheDBConnection(self.providerID)

@@ -60,11 +60,11 @@ class TVCache:
         myDB = self.get_db()
         myDB.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID])

-    def _get_title_and_url(self, item):
+    def _title_and_url(self, item):
         # override this in the provider if recent search has a different data layout to backlog searches
-        return self.provider._get_title_and_url(item)
+        return self.provider._title_and_url(item)

-    def _getRSSData(self):
+    def _cache_data(self):
         data = None
         return data

@@ -83,7 +83,7 @@ class TVCache:

         if self.shouldUpdate():
             # as long as the http request worked we count this as an update
-            data = self._getRSSData()
+            data = self._cache_data()
             if not data:
                 return []

@@ -96,7 +96,7 @@ class TVCache:
             # parse data
             cl = []
             for item in data:
-                title, url = self._get_title_and_url(item)
+                title, url = self._title_and_url(item)
                 ci = self._parseItem(title, url)
                 if ci is not None:
                     cl.append(ci)

@@ -182,9 +182,9 @@ class TVCache:

     def shouldUpdate(self):
         # if we've updated recently then skip the update
-        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
+        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.update_freq):
             logger.log(u'Last update was too soon, using old cache: today()-' + str(self.lastUpdate) + '<' + str(
-                datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
+                datetime.timedelta(minutes=self.update_freq)), logger.DEBUG)
             return False

         return True

@@ -255,11 +255,11 @@ class TVCache:
         else:
             return []

-    def listPropers(self, date=None, delimiter='.'):
+    def listPropers(self, date=None):
         myDB = self.get_db()
         sql = "SELECT * FROM provider_cache WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%' AND provider = ?"

-        if date != None:
+        if date:
             sql += ' AND time >= ' + str(int(time.mktime(date.timetuple())))

         return filter(lambda x: x['indexerid'] != 0, myDB.select(sql, [self.providerID]))

@@ -291,7 +291,7 @@ class TVCache:
         for curResult in sqlResults:

             # skip non-tv crap
-            if not show_name_helpers.filterBadReleases(curResult['name'], parse=False):
+            if not show_name_helpers.pass_wordlist_checks(curResult['name'], parse=False):
                 continue

             # get the show object, or if it's not one of our shows then ignore it

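The cache renames are easiest to see from the provider side. The following is a hedged sketch, not code from this change set; 'ExampleCache' and the numbers are hypothetical stand-ins showing where a provider-specific cache now plugs into the renamed hooks:

```python
# Illustrative sketch, not code from this patch: a provider cache using the
# renamed TVCache hooks. Real providers fetch their recent/RSS listings
# inside _cache_data(); this stand-in returns nothing.
from sickbeard import tvcache

class ExampleCache(tvcache.TVCache):
    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)
        self.update_freq = 15  # minutes between cache refreshes (formerly minTime)

    def _cache_data(self):
        # formerly _getRSSData(): return the provider's most recent items here
        return []

    def _title_and_url(self, item):
        # formerly _get_title_and_url(): override only if recent-search items
        # are laid out differently from backlog search results
        return self.provider._title_and_url(item)
```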
@@ -276,9 +276,9 @@ class GitUpdateManager(UpdateManager):
         branch = branch_info.strip().replace('refs/heads/', '', 1)
         if branch:
             return branch

         return ""

     def _check_github_for_update(self):
         """
         Uses git commands to check if there is a newer version that the provided

@@ -459,7 +459,7 @@ class SourceUpdateManager(UpdateManager):
             return "master"
         else:
             return sickbeard.CUR_COMMIT_BRANCH

     def need_update(self):
         # need this to run first to set self._newest_commit_hash
         try:

@@ -622,7 +622,7 @@ class SourceUpdateManager(UpdateManager):

             sickbeard.CUR_COMMIT_HASH = self._newest_commit_hash
             sickbeard.CUR_COMMIT_BRANCH = self.branch

         except Exception as e:
             logger.log(u"Error while trying to update: " + ex(e), logger.ERROR)
             logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG)

@@ -639,4 +639,4 @@ class SourceUpdateManager(UpdateManager):

     def list_remote_pulls(self):
         # we don't care about testers that don't use git
         return []

@@ -49,7 +49,7 @@ from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering,
 from sickbeard.name_cache import buildNameCache
 from sickbeard.browser import foldersAtPath
 from sickbeard.blackandwhitelist import BlackAndWhiteList, short_group_names
-from sickbeard.searchBacklog import FULL_BACKLOG, LIMITED_BACKLOG
+from sickbeard.search_backlog import FULL_BACKLOG, LIMITED_BACKLOG
 from tornado import gen
 from tornado.web import RequestHandler, StaticFileHandler, authenticated
 from lib import adba

@@ -3061,7 +3061,7 @@ class Manage(MainHandler):
         show_obj = helpers.findCertainShow(sickbeard.showList, int(indexer_id))

         if show_obj:
-            sickbeard.backlogSearchScheduler.action.searchBacklog([show_obj])  # @UndefinedVariable
+            sickbeard.backlogSearchScheduler.action.search_backlog([show_obj])  # @UndefinedVariable

         self.redirect('/manage/backlogOverview/')

@@ -3909,6 +3909,12 @@ class ConfigSearch(Config):

         t = PageTemplate(headers=self.request.headers, file='config_search.tmpl')
         t.submenu = self.ConfigMenu
+        t.using_rls_ignore_words = [(show.indexerid, show.name)
+                                    for show in sickbeard.showList if show.rls_ignore_words.strip()]
+        t.using_rls_ignore_words.sort(lambda x, y: cmp(x[1], y[1]), reverse=False)
+        t.using_rls_require_words = [(show.indexerid, show.name)
+                                     for show in sickbeard.showList if show.rls_require_words.strip()]
+        t.using_rls_require_words.sort(lambda x, y: cmp(x[1], y[1]), reverse=False)
         return t.respond()

     def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None,

@@ -4556,6 +4562,13 @@ class ConfigProviders(Config):
                 except:
                     curTorrentProvider.freeleech = 0

+            if hasattr(curTorrentProvider, 'reject_m2ts'):
+                try:
+                    curTorrentProvider.reject_m2ts = config.checkbox_to_value(
+                        kwargs[curTorrentProvider.get_id() + '_reject_m2ts'])
+                except:
+                    curTorrentProvider.reject_m2ts = 0
+
             if hasattr(curTorrentProvider, 'search_mode'):
                 try:
                     curTorrentProvider.search_mode = str(kwargs[curTorrentProvider.get_id() + '_search_mode']).strip()

@@ -24,7 +24,7 @@ if __name__ == '__main__':
     import unittest
     import sys

-    test_file_strings = [ x for x in glob.glob('*_tests.py') if not x in __file__]
+    test_file_strings = [x for x in glob.glob('*_tests.py') if x not in __file__]
     module_strings = [file_string[0:len(file_string) - 3] for file_string in test_file_strings]
     suites = [unittest.defaultTestLoader.loadTestsFromName(file_string) for file_string in module_strings]
     testSuite = unittest.TestSuite(suites)

tests/ignore_and_require_words_tests.py  (new file, 111 lines added)
@@ -0,0 +1,111 @@
+import os.path
+import sys
+import unittest
+
+import sickbeard
+from sickbeard import show_name_helpers
+
+sys.path.insert(1, os.path.abspath('..'))
+
+
+class TestCase(unittest.TestCase):
+
+    cases_pass_wordlist_checks = [
+        ('[GroupName].Show.Name.-.%02d.[null]', '', '', True),
+
+        ('[GroupName].Show.Name.-.%02d.[ignore]', '', 'required', False),
+        ('[GroupName].Show.Name.-.%02d.[required]', '', 'required', True),
+        ('[GroupName].Show.Name.-.%02d.[blahblah]', 'not_ignored', 'GroupName', True),
+        ('[GroupName].Show.Name.-.%02d.[blahblah]', 'not_ignored', '[GroupName]', True),
+        ('[GroupName].Show.Name.-.%02d.[blahblah]', 'not_ignored', 'Show.Name', True),
+        ('[GroupName].Show.Name.-.%02d.[required]', 'not_ignored', 'required', True),
+        ('[GroupName].Show.Name.-.%02d.[required]', '[not_ignored]', '[required]', True),
+
+        ('[GroupName].Show.Name.-.%02d.[ignore]', '[ignore]', '', False),
+        ('[GroupName].Show.Name.-.%02d.[required]', '[GroupName]', 'required', False),
+        ('[GroupName].Show.Name.-.%02d.[required]', 'GroupName', 'required', False),
+        ('[GroupName].Show.Name.-.%02d.[ignore]', 'ignore', 'GroupName', False),
+        ('[GroupName].Show.Name.-.%02d.[required]', 'Show.Name', 'required', False),
+
+        ('[GroupName].Show.Name.-.%02d.[ignore]', 'regex: no_ignore', '', True),
+        ('[GroupName].Show.Name.-.%02d.[480p]', 'ignore', 'regex: \d?\d80p', True),
+        ('[GroupName].Show.Name.-.%02d.[480p]', 'ignore', 'regex: \[\d?\d80p\]', True),
+        ('[GroupName].Show.Name.-.%02d.[ignore]', 'regex: ignore', '', False),
+        ('[GroupName].Show.Name.-.%02d.[ignore]', 'regex: \[ignore\]', '', False),
+        ('[GroupName].Show.Name.-.%02d.[ignore]', 'regex: ignore', 'required', False),
+
+        # The following test is True because a boundary is added to each regex not overridden with the prefix param
+        ('[GroupName].Show.ONEONE.-.%02d.[required]', 'regex: (one(two)?)', '', True),
+        ('[GroupName].Show.ONETWO.-.%02d.[required]', 'regex: ((one)?two)', 'required', False),
+        ('[GroupName].Show.TWO.-.%02d.[required]', 'regex: ((one)?two)', 'required', False),
+    ]
+
+    cases_contains = [
+        ('[GroupName].Show.Name.-.%02d.[illegal_regex]', 'regex:??illegal_regex', False),
+        ('[GroupName].Show.Name.-.%02d.[480p]', 'regex:(480|1080)p', True),
+        ('[GroupName].Show.Name.-.%02d.[contains]', 'regex:\[contains\]', True),
+        ('[GroupName].Show.Name.-.%02d.[contains]', '[contains]', True),
+        ('[GroupName].Show.Name.-.%02d.[contains]', 'contains', True),
+        ('[GroupName].Show.Name.-.%02d.[contains]', '[not_contains]', False),
+        ('[GroupName].Show.Name.-.%02d.[null]', '', False)
+    ]
+
+    cases_not_contains = [
+        ('[GroupName].Show.Name.-.%02d.[480p]', 'regex:(480|1080)p', False),
+        ('[GroupName].Show.Name.-.%02d.[contains]', 'regex:\[contains\]', False),
+        ('[GroupName].Show.Name.-.%02d.[contains]', '[contains]', False),
+        ('[GroupName].Show.Name.-.%02d.[contains]', 'contains', False),
+        ('[GroupName].Show.Name.-.%02d.[not_contains]', '[blah_blah]', True),
+        ('[GroupName].Show.Name.-.%02d.[null]', '', False)
+    ]
+
+    def test_pass_wordlist_checks(self):
+        # default:[] or copy in a test case tuple to debug in isolation
+        isolated = []
+
+        test_cases = (self.cases_pass_wordlist_checks, isolated)[len(isolated)]
+        for case_num, (name, ignore_list, require_list, expected_result) in enumerate(test_cases):
+            name = (name, name % case_num)['%02d' in name]
+            sickbeard.IGNORE_WORDS = ignore_list
+            sickbeard.REQUIRE_WORDS = require_list
+            self.assertEqual(expected_result, show_name_helpers.pass_wordlist_checks(name, False),
+                             'Expected %s with test: "%s" with ignore: "%s", require: "%s"' %
+                             (expected_result, name, ignore_list, require_list))
+
+    def test_contains_any(self):
+        # default:[] or copy in a test case tuple to debug in isolation
+        isolated = []
+
+        test_cases = (self.cases_contains, isolated)[len(isolated)]
+        for case_num, (name, csv_words, expected_result) in enumerate(test_cases):
+            name = (name, name % case_num)['%02d' in name]
+            self.assertEqual(expected_result, self.call_contains_any(name, csv_words),
+                             'Expected %s test: "%s" with csv_words: "%s"' %
+                             (expected_result, name, csv_words))

+    @staticmethod
+    def call_contains_any(name, csv_words):
+        re_extras = dict(re_prefix='.*', re_suffix='.*')
+        match = show_name_helpers.contains_any(name, csv_words, **re_extras)
+        return None is not match and match
+
+    def test_not_contains_any(self):
+        # default:[] or copy in a test case tuple to debug in isolation
+        isolated = []
+
+        test_cases = (self.cases_not_contains, isolated)[len(isolated)]
+        for case_num, (name, csv_words, expected_result) in enumerate(test_cases):
+            name = (name, name % case_num)['%02d' in name]
+            self.assertEqual(expected_result, self.call_not_contains_any(name, csv_words),
+                             'Expected %s test: "%s" with csv_words:"%s"' %
+                             (expected_result, name, csv_words))
+
+    @staticmethod
+    def call_not_contains_any(name, csv_words):
+        re_extras = dict(re_prefix='.*', re_suffix='.*')
+        match = show_name_helpers.not_contains_any(name, csv_words, **re_extras)
+        return None is not match and match
+
+
+if __name__ == '__main__':
+    unittest.main()
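If needed, the new module can also be loaded on its own with the same unittest loader pattern shown in the all_tests.py hunk above; this is an editorial snippet, and it assumes it is executed from the tests directory with the SickGear package importable:

```python
# Illustrative only - run just the new word-list tests, mirroring the
# loadTestsFromName pattern used by tests/all_tests.py.
import unittest

suite = unittest.defaultTestLoader.loadTestsFromName('ignore_and_require_words_tests')
unittest.TextTestRunner(verbosity=2).run(suite)
```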
@@ -23,8 +23,8 @@ class SceneTests(test.SickbeardTestDBCase):
         result = show_name_helpers.allPossibleShowNames(s)
         self.assertTrue(len(set(expected).intersection(set(result))) == len(expected))

-    def _test_filterBadReleases(self, name, expected):
-        result = show_name_helpers.filterBadReleases(name)
+    def _test_pass_wordlist_checks(self, name, expected):
+        result = show_name_helpers.pass_wordlist_checks(name)
         self.assertEqual(result, expected)

     def test_allPossibleShowNames(self):

@@ -40,12 +40,12 @@ class SceneTests(test.SickbeardTestDBCase):
         self._test_allPossibleShowNames('Show Name Full Country Name', expected=['Show Name Full Country Name', 'Show Name (FCN)'])
         self._test_allPossibleShowNames('Show Name (Full Country Name)', expected=['Show Name (Full Country Name)', 'Show Name (FCN)'])

-    def test_filterBadReleases(self):
-        self._test_filterBadReleases('Show.S02.German.Stuff-Grp', False)
-        self._test_filterBadReleases('Show.S02.Some.Stuff-Core2HD', False)
-        self._test_filterBadReleases('Show.S02.Some.German.Stuff-Grp', False)
-        # self._test_filterBadReleases('German.Show.S02.Some.Stuff-Grp', True)
-        self._test_filterBadReleases('Show.S02.This.Is.German', False)
+    def test_pass_wordlist_checks(self):
+        self._test_pass_wordlist_checks('Show.S02.German.Stuff-Grp', False)
+        self._test_pass_wordlist_checks('Show.S02.Some.Stuff-Core2HD', False)
+        self._test_pass_wordlist_checks('Show.S02.Some.German.Stuff-Grp', False)
+        # self._test_pass_wordlist_checks('German.Show.S02.Some.Stuff-Grp', True)
+        self._test_pass_wordlist_checks('Show.S02.This.Is.German', False)


 class SceneExceptionTestCase(test.SickbeardTestDBCase):

@@ -82,7 +82,7 @@ def test_generator(tvdbdid, show_name, curData, forceSearch):
         episode.status = c.WANTED
         episode.saveToDB()

-        bestResult = search.searchProviders(show, episode.season, episode.episode, forceSearch)
+        bestResult = search.search_providers(show, episode.season, episode.episode, forceSearch)
         if not bestResult:
             self.assertEqual(curData['b'], bestResult)
         self.assertEqual(curData['b'], bestResult.name)  #first is expected, second is choosen one