Mirror of https://github.com/SickGear/SickGear.git, synced 2024-11-21 12:25:04 +00:00

Change codebase cleanups.

* Cleanup most init warnings.
* Cleanup some vars, Pythonic instead of JS.
* Fix some typos and Python var/func names for Scheduler.
* Remove legacy handlers deprecated in 2020.
* Remove some legacy tagged stuff.
* Cleanup ConfigParser and _23.py.
* Change cleanup vendored scandir.
* Remove redundant pkg_resources.py in favour of the vendor folder.
* Remove backports.
* Remove trakt checker.
* Change remove redundant WindowsSelectorEventLoopPolicy from webserveInit.
* Cleanup varnames and providers.
* Various minor tidy-ups to remove IDE warnings.

This commit is contained in: parent 8ddffb7882, commit 32987134ba
98 changed files with 895 additions and 6955 deletions
@@ -3,6 +3,7 @@
 * Update package resource API 63.2.0 (3ae44cd) to 67.3.2 (b9bf2ec)
 * Change remove calls to legacy py2 fix encoding function
 * Change requirements for pure py3
+* Change codebase cleanups


 ### 3.27.8 (2023-02-20 23:30:00 UTC)

@@ -1080,7 +1081,7 @@
 * Add API response field `global exclude require` to sg.listrequirewords endpoint
 * Change improve Popen resource usage under py2
 * Add overall failure monitoring to History/Connect fails (renamed from "Provider fails")
-* Change log exception during updateCache in newznab
+* Change log exception during update_cache in newznab
 * Change make Py3.9 preparations
 * Change anime "Available groups" to display "No groups listed..." when API is fine with no results instead of blank
 * Change improve clarity of anime group lists by using terms Allow list and Block list
@@ -37,6 +37,9 @@ if old_magic != magic_number:

 # skip cleaned005 as used during dev by testers
 cleanups = [
+    ['.cleaned009.tmp', r'lib\scandir', [
+        r'lib\scandir\__pycache__', r'lib\scandir',
+    ]],
     ['.cleaned008.tmp', r'lib\tornado_py3', [
         r'lib\bs4_py2\builder\__pycache__', r'lib\bs4_py2\builder', r'lib\bs4_py2',
         r'lib\bs4_py3\builder\__pycache__', r'lib\bs4_py3\builder', r'lib\bs4_py3',
@@ -65,7 +65,7 @@
 <tbody>
 #for $hItem in $cacheResults:
-#set $provider = $providers.getProviderClass($hItem['provider'])
+#set $provider = $providers.get_by_id($hItem['provider'])
 #set $tip = '%s @ %s' % ($hItem['provider'], $SGDatetime.sbfdatetime($SGDatetime.fromtimestamp($hItem['time'])))
 #set $ver = $hItem['version']
 #set $ver = ($ver, '')[-1 == $ver]
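A side note on the `($ver, '')[-1 == $ver]` line kept above: these templates use the py2-era tuple-indexing idiom as a conditional expression. In plain Python it reads:

    ver = -1
    ver = (ver, '')[-1 == ver]  # the bool indexes the tuple: 0 keeps ver, 1 picks ''
    print(repr(ver))            # '' - the -1 sentinel is blanked for display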
@@ -36,12 +36,12 @@
 <!--
 \$(document).ready(function(){
 #if $sickgear.USE_NZBS
-#for $cur_newznab_provider in $sickgear.newznabProviderList:
+#for $cur_newznab_provider in $sickgear.newznab_providers:
 \$(this).addProvider('$cur_newznab_provider.get_id()', '$cur_newznab_provider.name', '$cur_newznab_provider.url', '<%= starify(cur_newznab_provider.key) %>', '$cur_newznab_provider.cat_ids', $int($cur_newznab_provider.default), !0);
 #end for
 #end if
 #if $sickgear.USE_TORRENTS
-#for $cur_torrent_rss_provider in $sickgear.torrentRssProviderList:
+#for $cur_torrent_rss_provider in $sickgear.torrent_rss_providers:
 \$(this).addTorrentRssProvider('$cur_torrent_rss_provider.get_id()', '$cur_torrent_rss_provider.name', '$cur_torrent_rss_provider.url', '<%= starify(cur_torrent_rss_provider.cookies) %>');
 #end for
 #end if
@@ -101,7 +101,7 @@

 <ul id="provider_order_list" class="provider_order_panel">
-#for $cur_provider in [$x for $x in $sickgear.providers.sortedProviderList()
+#for $cur_provider in [$x for $x in $sickgear.providers.sorted_sources()
 if $x.providerType == $GenericProvider.NZB and $sickgear.USE_NZBS or
 $x.providerType == $GenericProvider.TORRENT and $sickgear.USE_TORRENTS]
 #set $cur_name = $cur_provider.get_id()
@@ -129,7 +129,7 @@
 #end for
 </ul>

-<input type="hidden" name="provider_order" id="provider_order" value="<%=' '.join([x.get_id()+':'+str(int(x.is_enabled())) for x in sickgear.providers.sortedProviderList()])%>"/>
+<input type="hidden" name="provider_order" id="provider_order" value="<%=' '.join([x.get_id()+':'+str(int(x.is_enabled())) for x in sickgear.providers.sorted_sources()])%>"/>
 #if $sickgear.USE_NZBS or $sickgear.USE_TORRENTS
 <div id="provider_key">
 <span style="float:left;font-size:10px;vertical-align:top;font-weight:normal">(PA)</span><p class="note">Public access, no account required</p>
@@ -168,7 +168,7 @@
 <span class="component-desc">
 #set $provider_config_list_enabled = []
 #set $provider_config_list = []
-#for $cur_provider in [$x for $x in $sickgear.providers.sortedProviderList()
+#for $cur_provider in [$x for $x in $sickgear.providers.sorted_sources()
 if $x.providerType == $GenericProvider.NZB and $sickgear.USE_NZBS or
 $x.providerType == $GenericProvider.TORRENT and $sickgear.USE_TORRENTS]
 #if $cur_provider.is_enabled()
@@ -213,7 +213,7 @@
 #set $filter_scene_rej_nuked_desc = 'not scene nuked'
 #set $filter_scene_nuked_active_desc = 'nuked if no active search results'
 #set $filter_tip = 'nothing selected allows everything (i.e. no filtering, default)'
-#for $cur_newznab_provider in [$cur_provider for $cur_provider in $sickgear.newznabProviderList]
+#for $cur_newznab_provider in [$cur_provider for $cur_provider in $sickgear.newznab_providers]
 <div class="providerDiv" id="${cur_newznab_provider.get_id()}Div">
 #set $can_recent = $hasattr($cur_newznab_provider, 'enable_recentsearch')
 #set $can_backlog = $hasattr($cur_newznab_provider, 'enable_backlog')
@@ -345,8 +345,8 @@
 ##

 ##
-#for $cur_nzb_provider in [$cur_provider for $cur_provider in $sickgear.providers.sortedProviderList()
-if $cur_provider.providerType == $GenericProvider.NZB and $cur_provider not in $sickgear.newznabProviderList]:
+#for $cur_nzb_provider in [$cur_provider for $cur_provider in $sickgear.providers.sorted_sources()
+if $cur_provider.providerType == $GenericProvider.NZB and $cur_provider not in $sickgear.newznab_providers]:
 <div class="providerDiv" id="${cur_nzb_provider.get_id()}Div">
 #set $can_recent = $hasattr($cur_nzb_provider, 'enable_recentsearch')
 #set $can_backlog = $hasattr($cur_nzb_provider, 'enable_backlog')
@@ -488,7 +488,7 @@
 ##

 ##
-#for $cur_torrent_provider in $sickgear.USE_TORRENTS and [$cur_provider for $cur_provider in $sickgear.providers.sortedProviderList()
+#for $cur_torrent_provider in $sickgear.USE_TORRENTS and [$cur_provider for $cur_provider in $sickgear.providers.sorted_sources()
 if $cur_provider.providerType == $GenericProvider.TORRENT] or []:
 <div class="providerDiv" id="${cur_torrent_provider.get_id()}Div">
 #if callable(getattr(cur_torrent_provider, 'ui_string', None))
@@ -319,7 +319,7 @@
 </div>
 #end if

-#set $anyQualities, $bestQualities = $Quality.splitQuality(int($show_obj.quality))
+#set $anyQualities, $bestQualities = $Quality.split_quality(int($show_obj.quality))
 #if $show_obj.quality in $qualityPresets
 <div>
 <span class="details-title">Quality</span>
@@ -202,7 +202,7 @@

 <div class="field-pair">
-#set $qualities = $common.Quality.splitQuality(int($show_obj.quality))
+#set $qualities = $common.Quality.split_quality(int($show_obj.quality))
 #set global $any_qualities = $qualities[0]
 #set global $best_qualities = $qualities[1]
 #include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')
@@ -133,7 +133,7 @@
 <tbody>
 #for $hItem in $history_results
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($hItem['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($hItem['action']))
 #set $display_name = '<span data-sort="%s">%s - S%02iE%02i</span>' % (
 $hItem['data_name'],
 (('<span class="article">%s</span> %s' % ($hItem['name1'], $hItem['name2'])), $hItem['show_name'])[$sg_var('SORT_ARTICLE') or not $hItem['name1']],
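For readers unfamiliar with the renamed helper: `split_composite_status` unpacks the single integer SickBeard-style history code into a (status, quality) pair. A minimal sketch of that encoding (illustrative only; the real mapping lives in sickgear's Quality class):

    def composite_status(status, quality):
        # pack a quality value into the high part of the action integer
        return status + 100 * quality

    def split_composite_status(composite):
        # invert composite_status: the low two digits are the status
        return composite % 100, composite // 100

    assert split_composite_status(composite_status(2, 8)) == (2, 8)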
@@ -141,7 +141,7 @@
 <tr>
 #set $curdatetime = $datetime.datetime.strptime(str($hItem['date']), $history.dateFormat)
 <td><div class="${fuzzydate}" data-sort="$time.mktime($curdatetime.timetuple())">$SGDatetime.sbfdatetime($curdatetime, show_seconds=True)</div></td>
-<td class="tvShow"><a href="$sbRoot/home/view-show?tvid_prodid=$hItem['tvid_prodid']#season-$hItem['season']">$display_name#if $Quality.splitCompositeStatus($hItem['action'])[0] == $SNATCHED_PROPER then ' <span class="quality Proper">Proper</span>' else ''#</a></td>
+<td class="tvShow"><a href="$sbRoot/home/view-show?tvid_prodid=$hItem['tvid_prodid']#season-$hItem['season']">$display_name#if $Quality.split_composite_status($hItem['action'])[0] == $SNATCHED_PROPER then ' <span class="quality Proper">Proper</span>' else ''#</a></td>
 <td#echo ('', ' class="subtitles_column"')[$SUBTITLED == $curStatus]#>
 #if $SUBTITLED == $curStatus
 <img width="16" height="11" src="$sbRoot/images/flags/<%= hItem["resource"][len(hItem["resource"])-6:len(hItem["resource"])-4] + '.png' %>">
@@ -156,7 +156,7 @@
 #else
 #if '-1' != $hItem['provider'] and len($hItem['provider'])
 #if $curStatus in $SNATCHED_ANY + [$FAILED]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($hItem['provider']))
 #if None is not $provider
 <img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16" /><span>$provider.name</span>
 #else
@@ -207,10 +207,10 @@
 #set $order = 1
 #set $ordinal_indicators = {'1':'st', '2':'nd', '3':'rd'}
 #for $action in reversed($hItem['actions'])
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($action['action']))
 #set $basename = $os.path.basename($action['resource'])
 #if $curStatus in $SNATCHED_ANY + [$FAILED]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($action['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($action['provider']))
 #if None is not $provider
 #set $prov_list += ['<span%s><img class="help" src="%s/images/providers/%s" width="16" height="16" alt="%s" title="%s.. %s: %s" /></span>'\
 % (('', ' class="fail"')[$FAILED == $curStatus], $sbRoot, $provider.image_name(), $provider.name,
@@ -262,7 +262,7 @@
 #if $sg_var('USE_SUBTITLES')
 <td>
 #for $action in reversed($hItem['actions'])
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($action['action']))
 #if $SUBTITLED == $curStatus
 <img src="$sbRoot/images/subtitles/<%= action['provider'] + '.png' %>" width="16" height="16" alt="$action['provider']" title="<%= action['provider'].capitalize() %>:$os.path.basename($action['resource'])" />
 <span> / </span>
@@ -575,7 +575,7 @@
 #for $hItem in $stat_results
 <tr>
 <td class="provider text-nowrap">
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($hItem['provider']))
 #if None is not $provider
 <img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16"><span data-sort="$hItem['provider']">$provider.name</span>
 #else
@@ -628,7 +628,7 @@
 </thead>
 #set global $row = 0
 <tbody>
-#for $cur_provider in $sorted($sickgear.newznabProviderList, key=lambda x: x.last_recent_search or SGDatetime(2000,1,1), reverse=True)
+#for $cur_provider in $sorted($sickgear.newznab_providers, key=lambda x: x.last_recent_search or SGDatetime(2000,1,1), reverse=True)
 #set $last_rls_date = '-'
 #set $last_rls_age = None
 #set $last_rls_age_str = '-'
@@ -18,7 +18,7 @@
 </div>

 <div class="field-pair">
-#set $qualities = $Quality.splitQuality($sg_var('QUALITY_DEFAULT', SD))
+#set $qualities = $Quality.split_quality($sg_var('QUALITY_DEFAULT', SD))
 #set global $any_qualities = $qualities[0]
 #set global $best_qualities = $qualities[1]
 #include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')
@@ -25,7 +25,7 @@
 #set $ep_str = '%sx%s' % $ep_key
 #set $epLoc = $ep['location']
 #set never_aired = 0 < int($ep['season']) and 1 == int($ep['airdate'])
-<tr class="#echo ' '.join([$Overview.overviewStrings[$ep_cats[$ep_str]], ('', 'airdate-never')[$never_aired], ('', 'archived')[$ARCHIVED == $Quality.splitCompositeStatus(int($ep['status']))[0]]])#">
+<tr class="#echo ' '.join([$Overview.overviewStrings[$ep_cats[$ep_str]], ('', 'airdate-never')[$never_aired], ('', 'archived')[$ARCHIVED == $Quality.split_composite_status(int($ep['status']))[0]]])#">
 <td class="col-checkbox">
 <input type="checkbox" class="epCheck #echo 'hide' if $UNAIRED == int($ep['status']) else ''#" id="$ep_str" name="$ep_str">
 </td>
@@ -99,7 +99,7 @@
 </td>
 #end if
 #slurp
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($ep['status']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($ep['status']))
 #if Quality.NONE != $curQuality
 <td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# #if 'Unknown' != $statusStrings[$curStatus]#<span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.get_quality_ui($curQuality)</span>#end if#</td>
 #else
@@ -107,7 +107,7 @@
 #end if
 <td class="col-search">
 #if 0 != int($ep['season'])
-#set $status = $Quality.splitCompositeStatus(int($ep['status']))[0]
+#set $status = $Quality.split_composite_status(int($ep['status']))[0]
 #if ($status in $SNATCHED_ANY + [$DOWNLOADED, $ARCHIVED]) and $sg_var('USE_FAILED_DOWNLOADS')
 <a class="ep-retry" href="$sbRoot/home/episode-retry?tvid_prodid=$show_obj.tvid_prodid&season=$ep['season']&episode=$ep['episode']"><img src="$sbRoot/images/search16.png" height="16" alt="retry" title="Retry download"></a>
 #else
@@ -5,7 +5,7 @@
 #set $html_selected = ' selected="selected"'
 <div class="field-pair">
 <label for="quality-preset" class="clearfix">
-#set $overall_quality = $Quality.combineQualities($any_qualities, $best_qualities)
+#set $overall_quality = $Quality.combine_qualities($any_qualities, $best_qualities)
 <span class="component-title input">Quality to download</span>
 <span class="component-desc">
 #set $selected = None
@@ -222,7 +222,7 @@
 #for item in $history_compact
 #if 'tvid_prodid' in $item
 #set $action = $item['actions'][0]
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($action['action']))
 #set $status = None
 #if $curStatus in $SNATCHED_ANY + [$FAILED]
 #set $status = 'snatched'
@@ -62,7 +62,7 @@
 <tbody>
 #set $order = $oldest
 #for $hItem in $failed_results[::-1]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($hItem['provider']))
 #set $provider_name = None is not $provider and $provider.name or 'missing provider'
 #set $provider_image = None is not $provider and $provider.image_name() or 'missing.png'
 <tr>
@@ -18,7 +18,7 @@
 #else:
 #set $initial_quality = $SD
 #end if
-#set $anyQualities, $bestQualities = $Quality.splitQuality($sg_var('QUALITY_DEFAULT', $initial_quality))
+#set $anyQualities, $bestQualities = $Quality.split_quality($sg_var('QUALITY_DEFAULT', $initial_quality))
 <script type="text/javascript" src="$sbRoot/js/qualityChooser.js?v=$sbPID"></script>
 <script type="text/javascript" src="$sbRoot/js/massEdit.js?v=$sbPID"></script>
lib/_23.py (48 changes)
@@ -15,11 +15,24 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-import datetime
-from collections import deque
-from itertools import islice
-from sys import version_info
 from base64 import encodebytes as b64encodebytes
+from collections import deque
+# noinspection PyUnresolvedReferences
+from configparser import ConfigParser
+# noinspection PyUnresolvedReferences
+from enum import Enum
+from itertools import islice, zip_longest
+# noinspection PyUnresolvedReferences
+from inspect import getfullargspec as getargspec
+# noinspection PyUnresolvedReferences
+from os import scandir, DirEntry
+# noinspection PyUnresolvedReferences
+from subprocess import Popen
+from sys import version_info
+
+import datetime
+# noinspection PyUnresolvedReferences, PyPep8Naming
+import xml.etree.ElementTree as etree

 # noinspection PyUnresolvedReferences
 from six.moves.urllib.parse import quote, quote_plus, unquote as six_unquote, unquote_plus as six_unquote_plus, \
@@ -40,8 +53,6 @@ if False:
     # noinspection PyTypeChecker
     urlencode = urlsplit = urlunparse = urlunsplit = None  # type: Callable

-PY38 = version_info[0:2] >= (3, 8)
-

 def map_consume(*args):
     # type: (...) -> None
@@ -53,13 +64,13 @@ def consume(iterator, n=None):
     # type: (Iterator, Optional[int]) -> None
     """Advance the iterator n-steps ahead. If n is None, consume entirely. Returns nothing.

-    Useful if a method returns a Iterator but it's not used, but still all should be called,
+    Useful if a method returns an Iterator that is not used, but still all should be called,
     for example if each iter element calls a function that should be called for all or
     given amount of elements in Iterator

     examples:
-    consume(filter_iter(...))  # consumes all elements of given function that returns a Iterator
-    consume(filter_iter(...), 3)  # consumes next 3 elements of given function that returns a Iterator
+    consume(filter_iter(...))  # consumes all elements of given function that returns an Iterator
+    consume(filter_iter(...), 3)  # consumes next 3 elements of given function that returns an Iterator
     """
     # Use functions that consume iterators at C speed.
     if n is None:
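As the docstring notes, `consume` exists to drain an iterator purely for its side effects. A self-contained sketch of the deque/islice technique the imports above point to (the classic CPython itertools recipe; the in-repo version may differ in detail):

    from collections import deque
    from itertools import islice

    def consume(iterator, n=None):
        if n is None:
            deque(iterator, maxlen=0)  # exhaust at C speed into a zero-length deque
        else:
            next(islice(iterator, n, n), None)  # advance exactly n steps

    it = iter(range(10))
    consume(it, 3)
    print(next(it))  # -> 3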
@@ -131,24 +142,6 @@ def b64encodestring(s, keep_eol=False):
     return data.rstrip()


-# noinspection PyUnresolvedReferences,PyProtectedMember
-# noinspection PyUnresolvedReferences,PyCompatibility
-from configparser import ConfigParser
-# noinspection PyUnresolvedReferences
-from enum import Enum
-# noinspection PyUnresolvedReferences
-from os import scandir, DirEntry
-# noinspection PyUnresolvedReferences
-from itertools import zip_longest
-# noinspection PyUnresolvedReferences
-from inspect import getfullargspec as getargspec
-
-# noinspection PyUnresolvedReferences
-from subprocess import Popen
-
-# noinspection PyUnresolvedReferences, PyPep8Naming
-import xml.etree.ElementTree as etree
-
 native_timestamp = datetime.datetime.timestamp  # type: Callable[[datetime.datetime], float]
@@ -172,4 +165,3 @@ def decode_bytes(d, encoding='utf-8', errors='replace'):
 def map_none(*args):
     # type: (...) -> List
     return list(zip_longest(*args))
-
@@ -22,7 +22,7 @@ import threading
 from datetime import timedelta
 from time import sleep, time

-from _23 import ConfigParser
+from configparser import ConfigParser

 from .aniDBlink import AniDBLink
 from .aniDBcommands import *
(File diff suppressed because it is too large)
@@ -1,274 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import abc
import os

try:
    from collections.abc import MutableMapping
except ImportError:
    from collections import MutableMapping

try:
    from collections import UserDict
except ImportError:
    from UserDict import UserDict

try:
    from collections import OrderedDict
except ImportError:
    from ordereddict import OrderedDict

try:
    import pathlib
except ImportError:
    pathlib = None

from io import open
import sys

try:
    from thread import get_ident
except ImportError:
    try:
        from _thread import get_ident
    except ImportError:
        from _dummy_thread import get_ident


__all__ = ['UserDict', 'OrderedDict', 'open']


PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

native_str = str
str = type('str')


def from_none(exc):
    """raise from_none(ValueError('a')) == raise ValueError('a') from None"""
    exc.__cause__ = None
    exc.__suppress_context__ = True
    return exc


# from reprlib 3.2.1
def recursive_repr(fillvalue='...'):
    'Decorator to make a repr function return fillvalue for a recursive call'

    def decorating_function(user_function):
        repr_running = set()

        def wrapper(self):
            key = id(self), get_ident()
            if key in repr_running:
                return fillvalue
            repr_running.add(key)
            try:
                result = user_function(self)
            finally:
                repr_running.discard(key)
            return result

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function


# from collections 3.2.1
class _ChainMap(MutableMapping):
    ''' A ChainMap groups multiple dicts (or other mappings) together
    to create a single, updateable view.

    The underlying mappings are stored in a list. That list is public and can
    accessed or updated using the *maps* attribute. There is no other state.

    Lookups search the underlying mappings successively until a key is found.
    In contrast, writes, updates, and deletions only operate on the first
    mapping.

    '''

    def __init__(self, *maps):
        '''Initialize a ChainMap by setting *maps* to the given mappings.
        If no mappings are provided, a single empty dictionary is used.

        '''
        self.maps = list(maps) or [{}]  # always at least one map

    def __missing__(self, key):
        raise KeyError(key)

    def __getitem__(self, key):
        for mapping in self.maps:
            try:
                # can't use 'key in mapping' with defaultdict
                return mapping[key]
            except KeyError:
                pass
        # support subclasses that define __missing__
        return self.__missing__(key)

    def get(self, key, default=None):
        return self[key] if key in self else default

    def __len__(self):
        # reuses stored hash values if possible
        return len(set().union(*self.maps))

    def __iter__(self):
        return iter(set().union(*self.maps))

    def __contains__(self, key):
        return any(key in m for m in self.maps)

    @recursive_repr()
    def __repr__(self):
        return '{0.__class__.__name__}({1})'.format(
            self, ', '.join(map(repr, self.maps))
        )

    @classmethod
    def fromkeys(cls, iterable, *args):
        'Create a ChainMap with a single dict created from the iterable.'
        return cls(dict.fromkeys(iterable, *args))

    def copy(self):
        """
        New ChainMap or subclass with a new copy of
        maps[0] and refs to maps[1:]
        """
        return self.__class__(self.maps[0].copy(), *self.maps[1:])

    __copy__ = copy

    def new_child(self):  # like Django's Context.push()
        'New ChainMap with a new dict followed by all previous maps.'
        return self.__class__({}, *self.maps)

    @property
    def parents(self):  # like Django's Context.pop()
        'New ChainMap from maps[1:].'
        return self.__class__(*self.maps[1:])

    def __setitem__(self, key, value):
        self.maps[0][key] = value

    def __delitem__(self, key):
        try:
            del self.maps[0][key]
        except KeyError:
            raise KeyError('Key not found in the first mapping: {!r}'.format(key))

    def popitem(self):
        """
        Remove and return an item pair from maps[0].
        Raise KeyError is maps[0] is empty.
        """
        try:
            return self.maps[0].popitem()
        except KeyError:
            raise KeyError('No keys found in the first mapping.')

    def pop(self, key, *args):
        """
        Remove *key* from maps[0] and return its value.
        Raise KeyError if *key* not in maps[0].
        """
        try:
            return self.maps[0].pop(key, *args)
        except KeyError:
            raise KeyError('Key not found in the first mapping: {!r}'.format(key))

    def clear(self):
        'Clear maps[0], leaving maps[1:] intact.'
        self.maps[0].clear()


try:
    from collections import ChainMap
except ImportError:
    ChainMap = _ChainMap


_ABC = getattr(
    abc,
    'ABC',
    # Python 3.3 compatibility
    abc.ABCMeta(native_str('__ABC'), (object,), dict(__metaclass__=abc.ABCMeta)),
)


class _PathLike(_ABC):

    """Abstract base class for implementing the file system path protocol."""

    @abc.abstractmethod
    def __fspath__(self):
        """Return the file system path representation of the object."""
        raise NotImplementedError

    @classmethod
    def __subclasshook__(cls, subclass):
        return bool(
            hasattr(subclass, '__fspath__')
            # workaround for Python 3.5
            or pathlib
            and issubclass(subclass, pathlib.Path)
        )


PathLike = getattr(os, 'PathLike', _PathLike)


def _fspath(path):
    """Return the path representation of a path-like object.

    If str or bytes is passed in, it is returned unchanged. Otherwise the
    os.PathLike interface is used to get the path representation. If the
    path representation is not str or bytes, TypeError is raised. If the
    provided path is not str, bytes, or os.PathLike, TypeError is raised.
    """
    if isinstance(path, (str, bytes)):
        return path

    if not hasattr(path, '__fspath__') and isinstance(path, pathlib.Path):
        # workaround for Python 3.5
        return str(path)

    # Work from the object's type to match method resolution of other magic
    # methods.
    path_type = type(path)
    try:
        path_repr = path_type.__fspath__(path)
    except AttributeError:

        if hasattr(path_type, '__fspath__'):
            raise
        else:
            raise TypeError(
                "expected str, bytes or os.PathLike object, "
                "not " + path_type.__name__
            )
    if isinstance(path_repr, (str, bytes)):
        return path_repr
    else:
        raise TypeError(
            "expected {}.__fspath__() to return str or bytes, "
            "not {}".format(path_type.__name__, type(path_repr).__name__)
        )


fspath = getattr(os, 'fspath', _fspath)
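On py3 this whole file is redundant: `collections.ChainMap` ships in the stdlib with exactly the semantics documented above, which is why the commit deletes it. A quick illustration:

    from collections import ChainMap

    defaults = {'colour': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}
    cm = ChainMap(overrides, defaults)
    print(cm['user'])          # 'admin' - lookups search the maps left to right
    print(cm['colour'])        # 'red'   - falls through to the next mapping
    cm['colour'] = 'blue'      # writes only ever touch the first mapping
    print(defaults['colour'])  # still 'red'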
@@ -1,196 +0,0 @@
from __future__ import absolute_import

import functools
from collections import namedtuple
from threading import RLock

_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"])


@functools.wraps(functools.update_wrapper)
def update_wrapper(
    wrapper,
    wrapped,
    assigned=functools.WRAPPER_ASSIGNMENTS,
    updated=functools.WRAPPER_UPDATES,
):
    """
    Patch two bugs in functools.update_wrapper.
    """
    # workaround for http://bugs.python.org/issue3445
    assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
    wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
    # workaround for https://bugs.python.org/issue17482
    wrapper.__wrapped__ = wrapped
    return wrapper


class _HashedSeq(list):
    __slots__ = 'hashvalue'

    def __init__(self, tup, hash=hash):
        self[:] = tup
        self.hashvalue = hash(tup)

    def __hash__(self):
        return self.hashvalue


def _make_key(
    args,
    kwds,
    typed,
    kwd_mark=(object(),),
    fasttypes=set([int, str, frozenset, type(None)]),
    sorted=sorted,
    tuple=tuple,
    type=type,
    len=len,
):
    'Make a cache key from optionally typed positional and keyword arguments'
    key = args
    if kwds:
        sorted_items = sorted(kwds.items())
        key += kwd_mark
        for item in sorted_items:
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for k, v in sorted_items)
    elif len(key) == 1 and type(key[0]) in fasttypes:
        return key[0]
    return _HashedSeq(key)


def lru_cache(maxsize=100, typed=False):  # noqa: C901
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
    f.cache_info().  Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    """

    # Users should only access the lru_cache through its public API:
    # cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    def decorating_function(user_function):

        cache = dict()
        stats = [0, 0]  # make statistics updateable non-locally
        HITS, MISSES = 0, 1  # names for the stats fields
        make_key = _make_key
        cache_get = cache.get  # bound method to lookup key or return None
        _len = len  # localize the global len() function
        lock = RLock()  # because linkedlist updates aren't threadsafe
        root = []  # root of the circular doubly linked list
        root[:] = [root, root, None, None]  # initialize by pointing to self
        nonlocal_root = [root]  # make updateable non-locally
        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields

        if maxsize == 0:

            def wrapper(*args, **kwds):
                # no caching, just do a statistics update after a successful call
                result = user_function(*args, **kwds)
                stats[MISSES] += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                # simple caching without ordering or size limit
                key = make_key(args, kwds, typed)
                result = cache_get(
                    key, root
                )  # root used here as a unique not-found sentinel
                if result is not root:
                    stats[HITS] += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                stats[MISSES] += 1
                return result

        else:

            def wrapper(*args, **kwds):
                # size limited caching that tracks accesses by recency
                key = make_key(args, kwds, typed) if kwds or typed else args
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # record recent use of the key by moving it
                        # to the front of the list
                        (root,) = nonlocal_root
                        link_prev, link_next, key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        stats[HITS] += 1
                        return result
                result = user_function(*args, **kwds)
                with lock:
                    (root,) = nonlocal_root
                    if key in cache:
                        # getting here means that this same key was added to the
                        # cache while the lock was released.  since the link
                        # update is already done, we need only return the
                        # computed result and update the count of misses.
                        pass
                    elif _len(cache) >= maxsize:
                        # use the old root to store the new key and result
                        oldroot = root
                        oldroot[KEY] = key
                        oldroot[RESULT] = result
                        # empty the oldest link and make it the new root
                        root = nonlocal_root[0] = oldroot[NEXT]
                        oldkey = root[KEY]
                        root[KEY] = root[RESULT] = None
                        # now update the cache dictionary for the new links
                        del cache[oldkey]
                        cache[key] = oldroot
                    else:
                        # put result in a new link at the front of the list
                        last = root[PREV]
                        link = [last, root, key, result]
                        last[NEXT] = root[PREV] = cache[key] = link
                    stats[MISSES] += 1
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                root = nonlocal_root[0]
                root[:] = [root, root, None, None]
                stats[:] = [0, 0]

        wrapper.__wrapped__ = user_function
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)

    return decorating_function
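Likewise redundant on py3, where `functools.lru_cache` provides the same public API (`cache_info`, `cache_clear`, `__wrapped__`):

    from functools import lru_cache

    @lru_cache(maxsize=100)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(30))           # 832040, each distinct n computed only once
    print(fib.cache_info())  # CacheInfo(hits=28, misses=31, maxsize=100, currsize=31)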
@@ -1,204 +0,0 @@
"""The match_hostname() function from Python 3.7.0, essential when using SSL."""

import sys
import socket as _socket

try:
    # Divergence: Python-3.7+'s _ssl has this exception type but older Pythons do not
    from _ssl import SSLCertVerificationError
    CertificateError = SSLCertVerificationError
except:
    class CertificateError(ValueError):
        pass


__version__ = '3.7.0.1'


# Divergence: Added to deal with ipaddess as bytes on python2
def _to_text(obj):
    if isinstance(obj, str) and sys.version_info < (3,):
        obj = unicode(obj, encoding='ascii', errors='strict')
    elif sys.version_info >= (3,) and isinstance(obj, bytes):
        obj = str(obj, encoding='ascii', errors='strict')
    return obj


def _to_bytes(obj):
    if isinstance(obj, str) and sys.version_info >= (3,):
        obj = bytes(obj, encoding='ascii', errors='strict')
    elif sys.version_info < (3,) and isinstance(obj, unicode):
        obj = obj.encode('ascii', 'strict')
    return obj


def _dnsname_match(dn, hostname):
    """Matching according to RFC 6125, section 6.4.3

    - Hostnames are compared lower case.
    - For IDNA, both dn and hostname must be encoded as IDN A-label (ACE).
    - Partial wildcards like 'www*.example.org', multiple wildcards, sole
      wildcard or wildcards in labels other then the left-most label are not
      supported and a CertificateError is raised.
    - A wildcard must match at least one character.
    """
    if not dn:
        return False

    wildcards = dn.count('*')
    # speed up common case w/o wildcards
    if not wildcards:
        return dn.lower() == hostname.lower()

    if wildcards > 1:
        # Divergence .format() to percent formatting for Python < 2.6
        raise CertificateError(
            "too many wildcards in certificate DNS name: %s" % repr(dn))

    dn_leftmost, sep, dn_remainder = dn.partition('.')

    if '*' in dn_remainder:
        # Only match wildcard in leftmost segment.
        # Divergence .format() to percent formatting for Python < 2.6
        raise CertificateError(
            "wildcard can only be present in the leftmost label: "
            "%s." % repr(dn))

    if not sep:
        # no right side
        # Divergence .format() to percent formatting for Python < 2.6
        raise CertificateError(
            "sole wildcard without additional labels are not support: "
            "%s." % repr(dn))

    if dn_leftmost != '*':
        # no partial wildcard matching
        # Divergence .format() to percent formatting for Python < 2.6
        raise CertificateError(
            "partial wildcards in leftmost label are not supported: "
            "%s." % repr(dn))

    hostname_leftmost, sep, hostname_remainder = hostname.partition('.')
    if not hostname_leftmost or not sep:
        # wildcard must match at least one char
        return False
    return dn_remainder.lower() == hostname_remainder.lower()


def _inet_paton(ipname):
    """Try to convert an IP address to packed binary form

    Supports IPv4 addresses on all platforms and IPv6 on platforms with IPv6
    support.
    """
    # inet_aton() also accepts strings like '1'
    # Divergence: We make sure we have native string type for all python versions
    try:
        b_ipname = _to_bytes(ipname)
    except UnicodeError:
        raise ValueError("%s must be an all-ascii string." % repr(ipname))

    # Set ipname in native string format
    if sys.version_info < (3,):
        n_ipname = b_ipname
    else:
        n_ipname = ipname

    if n_ipname.count('.') == 3:
        try:
            return _socket.inet_aton(n_ipname)
        # Divergence: OSError on late python3. socket.error earlier.
        # Null bytes generate ValueError on python3(we want to raise
        # ValueError anyway), TypeError # earlier
        except (OSError, _socket.error, TypeError):
            pass

    try:
        return _socket.inet_pton(_socket.AF_INET6, n_ipname)
    # Divergence: OSError on late python3. socket.error earlier.
    # Null bytes generate ValueError on python3(we want to raise
    # ValueError anyway), TypeError # earlier
    except (OSError, _socket.error, TypeError):
        # Divergence .format() to percent formatting for Python < 2.6
        raise ValueError("%s is neither an IPv4 nor an IP6 "
                         "address." % repr(ipname))
    except AttributeError:
        # AF_INET6 not available
        pass

    # Divergence .format() to percent formatting for Python < 2.6
    raise ValueError("%s is not an IPv4 address." % repr(ipname))


def _ipaddress_match(ipname, host_ip):
    """Exact matching of IP addresses.

    RFC 6125 explicitly doesn't define an algorithm for this
    (section 1.7.2 - "Out of Scope").
    """
    # OpenSSL may add a trailing newline to a subjectAltName's IP address
    ip = _inet_paton(ipname.rstrip())
    return ip == host_ip


def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed.

    The function matches IP addresses rather than dNSNames if hostname is a
    valid ipaddress string. IPv4 addresses are supported on all platforms.
    IPv6 addresses are supported on platforms with IPv6 support (AF_INET6
    and inet_pton).

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate, match_hostname needs a "
                         "SSL socket or SSL context with either "
                         "CERT_OPTIONAL or CERT_REQUIRED")
    try:
        # Divergence: Deal with hostname as bytes
        host_ip = _inet_paton(_to_text(hostname))
    except ValueError:
        # Not an IP address (common case)
        host_ip = None
    except UnicodeError:
        # Divergence: Deal with hostname as byte strings.
        # IP addresses should be all ascii, so we consider it not
        # an IP address if this fails
        host_ip = None
    dnsnames = []
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == 'IP Address':
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
                               "doesn't match either of %s"
                               % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
                               "doesn't match %r"
                               % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
                               "subjectAltName fields were found")
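The py3 ssl module performs this check itself, so the backport can go. A simplified sketch of the RFC 6125 wildcard rule that `_dnsname_match` enforced (illustrative only; it omits the error cases raised above):

    def dnsname_match(dn, hostname):
        # compare lower case; a wildcard is honoured only as the entire
        # left-most label and must match exactly one non-empty label
        if '*' not in dn:
            return dn.lower() == hostname.lower()
        dn_left, _, dn_rest = dn.partition('.')
        host_left, _, host_rest = hostname.partition('.')
        return (dn_left == '*' and bool(host_left)
                and dn_rest.lower() == host_rest.lower())

    print(dnsname_match('*.example.org', 'www.example.org'))  # True
    print(dnsname_match('*.example.org', 'a.b.example.org'))  # False - one label only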
@@ -1,216 +0,0 @@
"""
Patch recently added ABCs into the standard lib module
``collections.abc`` (Py3) or ``collections`` (Py2).

Usage::

    import backports_abc
    backports_abc.patch()

or::

    try:
        from collections.abc import Generator
    except ImportError:
        from backports_abc import Generator
"""

try:
    import collections.abc as _collections_abc
except ImportError:
    import collections as _collections_abc


def get_mro(cls):
    try:
        return cls.__mro__
    except AttributeError:
        return old_style_mro(cls)


def old_style_mro(cls):
    yield cls
    for base in cls.__bases__:
        for c in old_style_mro(base):
            yield c


def mk_gen():
    from abc import abstractmethod

    required_methods = (
        '__iter__', '__next__' if hasattr(iter(()), '__next__') else 'next',
        'send', 'throw', 'close')

    class Generator(_collections_abc.Iterator):
        __slots__ = ()

        if '__next__' in required_methods:
            def __next__(self):
                return self.send(None)
        else:
            def next(self):
                return self.send(None)

        @abstractmethod
        def send(self, value):
            raise StopIteration

        @abstractmethod
        def throw(self, typ, val=None, tb=None):
            if val is None:
                if tb is None:
                    raise typ
                val = typ()
            if tb is not None:
                val = val.with_traceback(tb)
            raise val

        def close(self):
            try:
                self.throw(GeneratorExit)
            except (GeneratorExit, StopIteration):
                pass
            else:
                raise RuntimeError('generator ignored GeneratorExit')

        @classmethod
        def __subclasshook__(cls, C):
            if cls is Generator:
                mro = get_mro(C)
                for method in required_methods:
                    for base in mro:
                        if method in base.__dict__:
                            break
                    else:
                        return NotImplemented
                return True
            return NotImplemented

    generator = type((lambda: (yield))())
    Generator.register(generator)
    return Generator


def mk_awaitable():
    from abc import abstractmethod, ABCMeta

    @abstractmethod
    def __await__(self):
        yield

    @classmethod
    def __subclasshook__(cls, C):
        if cls is Awaitable:
            for B in get_mro(C):
                if '__await__' in B.__dict__:
                    if B.__dict__['__await__']:
                        return True
                    break
        return NotImplemented

    # calling metaclass directly as syntax differs in Py2/Py3
    Awaitable = ABCMeta('Awaitable', (), {
        '__slots__': (),
        '__await__': __await__,
        '__subclasshook__': __subclasshook__,
    })

    return Awaitable


def mk_coroutine():
    from abc import abstractmethod

    class Coroutine(Awaitable):
        __slots__ = ()

        @abstractmethod
        def send(self, value):
            """Send a value into the coroutine.
            Return next yielded value or raise StopIteration.
            """
            raise StopIteration

        @abstractmethod
        def throw(self, typ, val=None, tb=None):
            """Raise an exception in the coroutine.
            Return next yielded value or raise StopIteration.
            """
            if val is None:
                if tb is None:
                    raise typ
                val = typ()
            if tb is not None:
                val = val.with_traceback(tb)
            raise val

        def close(self):
            """Raise GeneratorExit inside coroutine.
            """
            try:
                self.throw(GeneratorExit)
            except (GeneratorExit, StopIteration):
                pass
            else:
                raise RuntimeError('coroutine ignored GeneratorExit')

        @classmethod
        def __subclasshook__(cls, C):
            if cls is Coroutine:
                mro = get_mro(C)
                for method in ('__await__', 'send', 'throw', 'close'):
                    for base in mro:
                        if method in base.__dict__:
                            break
                    else:
                        return NotImplemented
                return True
            return NotImplemented

    return Coroutine


###
# make all ABCs available in this module

try:
    Generator = _collections_abc.Generator
except AttributeError:
    Generator = mk_gen()

try:
    Awaitable = _collections_abc.Awaitable
except AttributeError:
    Awaitable = mk_awaitable()

try:
    Coroutine = _collections_abc.Coroutine
except AttributeError:
    Coroutine = mk_coroutine()

try:
    from inspect import isawaitable
except ImportError:
    def isawaitable(obj):
        return isinstance(obj, Awaitable)


###
# allow patching the stdlib

PATCHED = {}


def patch(patch_inspect=True):
    """
    Main entry point for patching the ``collections.abc`` and ``inspect``
    standard library modules.
    """
    PATCHED['collections.abc.Generator'] = _collections_abc.Generator = Generator
    PATCHED['collections.abc.Coroutine'] = _collections_abc.Coroutine = Coroutine
    PATCHED['collections.abc.Awaitable'] = _collections_abc.Awaitable = Awaitable

    if patch_inspect:
        import inspect
        PATCHED['inspect.isawaitable'] = inspect.isawaitable = isawaitable
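Every name this module patched in exists natively on the py3 versions SickGear now supports, which is why the commit drops it:

    import inspect
    from collections.abc import Awaitable, Coroutine, Generator

    async def coro():
        return 1

    def gen():
        yield 1

    c = coro()
    print(isinstance(c, Awaitable), isinstance(c, Coroutine))  # True True
    print(isinstance(gen(), Generator))                        # True
    print(inspect.isawaitable(c))                              # True
    c.close()  # silence the 'coroutine was never awaited' warning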
@@ -16,20 +16,25 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

 import random
-from six import moves

 # Browser apps represented in data
 # noinspection PyUnresolvedReferences
 __all__ = ['chrome', 'opera', 'firefox', 'safari', 'ie']


+# noinspection PyUnreachableCode
+if False:
+    from typing import AnyStr
+
+
 def get_ua():
+    # type: (...) -> AnyStr
     """
     Return a random browser user agent string
-    :return: A browser user agent string
-    :rtype: String
+    :return: A browser user agent
     """
     ua = []
-    for x in moves.xrange(1, 10):
+    for x in range(1, 10):
         ua += [random.choice(browser_ua.get(random.choice(__all__)))]
     return random.choice(ua)
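The only behavioural change here is swapping six.moves.xrange for the builtin range; the sampling logic is untouched. A self-contained sketch with placeholder UA data (the real browser_ua table lives further down the module):

    import random

    __all__ = ['chrome', 'firefox']
    browser_ua = {'chrome': ['UA-chrome-1', 'UA-chrome-2'],   # placeholder strings
                  'firefox': ['UA-firefox-1']}

    def get_ua():
        # draw nine candidates, each from a randomly picked browser pool
        ua = []
        for _ in range(1, 10):
            ua += [random.choice(browser_ua.get(random.choice(__all__)))]
        return random.choice(ua)

    print(get_ua())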
@@ -1,5 +1,6 @@
 import re
-from bs4 import BeautifulSoup, SoupStrainer
+from bs4 import BeautifulSoup
+from bs4.element import SoupStrainer
 from six import iteritems
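SoupStrainer is defined in bs4.element, so the new import path points at its home module. Usage is unchanged:

    from bs4 import BeautifulSoup
    from bs4.element import SoupStrainer

    # parse only anchor tags; a strainer keeps the tree small on big pages
    soup = BeautifulSoup('<p>hi <a href="/x">x</a></p>', 'html.parser',
                         parse_only=SoupStrainer('a'))
    print([a['href'] for a in soup.find_all('a')])  # ['/x']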
@@ -14,8 +14,6 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-from six import PY2, string_types
-
 # noinspection PyUnreachableCode
 if False:
     from typing import AnyStr
@@ -28,6 +26,7 @@ def ex(e):
     return str(e)


+# noinspection DuplicatedCode
 class SickGearException(Exception):
     """Generic SickGear Exception - should never be thrown, only subclassed"""
lib/pkg_resources.py (2625 changes)
(File diff suppressed because it is too large)
@@ -683,8 +683,9 @@ def get_system_temp_dir():

 def proxy_setting(setting, request_url, force=False):
     """
-    Returns a list of a) proxy_setting address value or a PAC is fetched and parsed if proxy_setting
-    starts with "PAC:" (case-insensitive) and b) True/False if "PAC" is found in the proxy_setting.
+    Returns a list of
+    a) proxy_setting address value or a PAC is fetched and parsed if proxy_setting starts with "PAC:" (case-insensitive)
+    b) True/False if "PAC" is found in the proxy_setting.

     The PAC data parser is crude, javascript is not eval'd. The first "PROXY URL" found is extracted with a list
     of "url_a_part.url_remaining", "url_b_part.url_remaining", "url_n_part.url_remaining" and so on.
@@ -720,7 +721,7 @@ def proxy_setting(setting, request_url, force=False):
     request_url_match = False
     parsed_url = urlparse(request_url)
     netloc = parsed_url.netloc
-    for pac_data in re.finditer(r"""(?:[^'"]*['"])([^.]+\.[^'"]*)(?:['"])""", resp, re.I):
+    for pac_data in re.finditer(r"""[^'"]*['"]([^.]+\.[^'"]*)['"]""", resp, re.I):
         data = re.search(r"""PROXY\s+([^'"]+)""", pac_data.group(1), re.I)
         if data:
             if force:
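The tightened pattern only drops two needless non-capturing groups; behaviour is the same. A quick demonstration against a toy PAC body (the proxy host is hypothetical):

    import re

    resp = 'function FindProxyForURL(url, host) { return "PROXY proxy.example.com:8080"; }'
    for pac_data in re.finditer(r"""[^'"]*['"]([^.]+\.[^'"]*)['"]""", resp, re.I):
        data = re.search(r"""PROXY\s+([^'"]+)""", pac_data.group(1), re.I)
        if data:
            print(data.group(1))  # proxy.example.com:8080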
@@ -1570,8 +1571,6 @@ def int_to_time(d_int):
     """
     convert integer from dt_to_int back to datetime.time

-    :param d_int: integer
-    :return: datetime.time
     """
     if None is d_int:
         return None
@@ -1610,7 +1609,7 @@ def ast_eval(value, default=None):
     """Convert string typed value into actual Python type and value

     :param value: string value to convert
-    :param default: value to return if cannot convert
+    :param default: value to return if it cannot convert
     :return: converted type and value or default
     """
     if not isinstance(value, string_types):
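A minimal sketch of what a helper of this shape does, assuming ast.literal_eval underneath (the in-repo version may handle more cases):

    import ast

    def ast_eval(value, default=None):
        # convert a string-typed value into an actual Python type and value
        if not isinstance(value, str):
            return default
        try:
            return ast.literal_eval(value)
        except (SyntaxError, ValueError):
            return default

    print(ast_eval("{'a': 1}"))                  # {'a': 1}
    print(ast_eval('not a literal', default=0))  # 0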
@@ -1667,8 +1666,8 @@ def calc_age(birthday, deathday=None, date=None):
     # type: (datetime.date, datetime.date, Optional[datetime.date]) -> Optional[int]
     """
     returns age based on current date or given date
-    :param birthday: birth date
-    :param deathday: death date
+    :param birthday: birthdate
+    :param deathday: deathdate
     :param date:
     """
     if isinstance(birthday, datetime.date):
@@ -1677,7 +1676,7 @@ def calc_age(birthday, deathday=None, date=None):
     try:
         b_d = birthday.replace(year=today.year)

-    # raised when birth date is February 29
+    # raised when birthdate is February 29
     # and the current year is not a leap year
     except ValueError:
         b_d = birthday.replace(year=today.year, month=birthday.month + 1, day=1)
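A worked sketch of the February 29 rule the comment describes (simplified from the full calc_age; deathday handling is omitted):

    import datetime

    def age_on(birthday, today):
        try:
            b_d = birthday.replace(year=today.year)
        except ValueError:
            # birthdate is February 29 and the current year is not a leap year
            b_d = birthday.replace(year=today.year, month=birthday.month + 1, day=1)
        return today.year - birthday.year - (today < b_d)

    leapling = datetime.date(2000, 2, 29)
    print(age_on(leapling, datetime.date(2023, 2, 28)))  # 22 - birthday not yet reached
    print(age_on(leapling, datetime.date(2023, 3, 1)))   # 23 - treated as March 1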
@@ -47,7 +47,7 @@ class Itasa(ServiceBase):
     quality_dict = {Quality.SDTV : '',
                     Quality.SDDVD : 'dvdrip',
                     Quality.RAWHDTV : '1080i',
-                    Quality.HDTV : '720p',
+                    Quality.HDTV : '720p',
                     Quality.FULLHDTV : ('1080p','720p'),
                     Quality.HDWEBDL : 'web-dl',
                     Quality.FULLHDWEBDL : 'web-dl',
@@ -55,20 +55,20 @@ class Itasa(ServiceBase):
                     Quality.FULLHDBLURAY : ('bdrip', 'bluray'),
                     Quality.UNKNOWN : 'unknown' #Any subtitle will be downloaded
                     }

     def init(self):

         super(Itasa, self).init()
         login_pattern = '<input type="hidden" name="return" value="([^\n\r\t ]+?)" /><input type="hidden" name="([^\n\r\t ]+?)" value="([^\n\r\t ]+?)" />'

         response = requests.get(self.server_url + 'index.php')
         if response.status_code != 200:
             raise ServiceError('Initiate failed')

         match = re.search(login_pattern, response.content, re.IGNORECASE | re.DOTALL)
         if not match:
             raise ServiceError('Can not find unique id parameter on page')

         login_parameter = {'username': 'sickbeard',
                            'passwd': 'subliminal',
                            'remember': 'yes',
@@ -77,7 +77,7 @@ class Itasa(ServiceBase):
                            'option': 'com_user',
                            'task': 'login',
                            'silent': 'true',
-                           'return': match.group(1),
+                           'return': match.group(1),
                            match.group(2): match.group(3)
                            }
@@ -85,7 +85,7 @@ class Itasa(ServiceBase):
         r = self.session.post(self.server_url + 'index.php', data=login_parameter)
         if not re.search('logouticon.png', r.content, re.IGNORECASE | re.DOTALL):
             raise ServiceError('Itasa Login Failed')

     @cachedmethod
     def get_series_id(self, name):
         """Get the show page and cache every show found in it"""
@@ -100,7 +100,7 @@ class Itasa(ServiceBase):
             series_id = int(match.group(1))
             self.cache_for(self.get_series_id, args=(series_name,), result=series_id)
         return self.cached_value(self.get_series_id, args=(name,))

     def get_episode_id(self, series, series_id, season, episode, quality):
         """Get the id subtitle for episode with the given quality"""
@@ -115,14 +115,14 @@ class Itasa(ServiceBase):
             if seasons.text.lower().strip() == 'stagione %s' % str(season):
                 season_link = seasons['href']
                 break

         if not season_link:
             logger.debug(u'Could not find season %s for series %s' % (series, str(season)))
             return None

         r = self.session.get(season_link)
         soup = BeautifulSoup(r.content, self.required_features)

         all_qualities = soup.find('div', attrs = {'id' : 'remositorycontainerlist'})
         for qualities in all_qualities.find_all(href=re.compile('func=select')):
             if qualities.text.lower().strip() in self.quality_dict[quality]:
@@ -131,11 +131,11 @@ class Itasa(ServiceBase):
                 soup = BeautifulSoup(r.content, self.required_features)
                 break

-        #If we want SDTV we are just on the right page so quality link will be None
+        #If we want SDTV we are just on the right page so quality link will be None
         if not quality == Quality.SDTV and not quality_link:
             logger.debug(u'Could not find a subtitle with required quality for series %s season %s' % (series, str(season)))
             return None

         all_episodes = soup.find('div', attrs = {'id' : 'remositoryfilelisting'})
         for episodes in all_episodes.find_all(href=re.compile('func=fileinfo')):
             ep_string = "%(seasonnumber)dx%(episodenumber)02d" % {'seasonnumber': season, 'episodenumber': episode}
@@ -144,12 +144,12 @@ class Itasa(ServiceBase):
            if match:
                episode_id = match.group(1)
                return episode_id

        return episode_id

    def list_checked(self, video, languages):
        return self.query(video.path or video.release, languages, get_keywords(video.guess), video.series, video.season, video.episode)

    def query(self, filepath, languages, keywords, series, season, episode):
        logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
@@ -160,8 +160,8 @@ class Itasa(ServiceBase):
        except KeyError:
            logger.debug(u'Could not find series id for %s' % series)
            return []

-       episode_id = self.get_episode_id(series, series_id, season, episode, Quality.nameQuality(filepath))
+       episode_id = self.get_episode_id(series, series_id, season, episode, Quality.name_quality(filepath))
        if not episode_id:
            logger.debug(u'Could not find subtitle for series %s' % series)
            return []
@@ -173,11 +173,11 @@ class Itasa(ServiceBase):
        sub_language = self.get_language('it')
        path = get_subtitle_path(filepath, sub_language, self.config.multi)
        subtitle = ResultSubtitle(path, sub_language, self.__class__.__name__.lower(), sub_link)

        return [subtitle]

    def download(self, subtitle):
        logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path))
        try:
            r = self.session.get(subtitle.link, headers={'Referer': self.server_url, 'User-Agent': self.user_agent})
@@ -204,13 +204,13 @@ class Itasa(ServiceBase):
            else:
                zipsub.close()
                raise DownloadFailedError('No subtitles found in zip file')

            zipsub.close()
        except Exception as e:
            if os.path.exists(subtitle.path):
                os.remove(subtitle.path)
            raise DownloadFailedError(str(e))

        logger.debug(u'Download finished')

Service = Itasa
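A detail worth noting in quality_dict above: values are either a plain string or a tuple, so the membership test `qualities.text.lower().strip() in self.quality_dict[quality]` performs substring containment for strings and element membership for tuples. A standalone sketch with hypothetical keys:

quality_dict = {'HDTV': '720p', 'FULLHDTV': ('1080p', '720p')}  # hypothetical subset

def quality_matches(link_text, wanted):
    # str value -> substring containment; tuple value -> element membership
    return link_text in quality_dict[wanted]

print(quality_matches('720p', 'HDTV'))      # True, substring of '720p'
print(quality_matches('720p', 'FULLHDTV'))  # True, element of the tuple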
sickgear.py
@@ -43,7 +43,8 @@ versions = [((3, 7, 1), (3, 8, 16)),
            ((3, 9, 0), (3, 9, 2)), ((3, 9, 4), (3, 9, 16)),
            ((3, 10, 0), (3, 11, 2))]  # inclusive version ranges
if not any(list(map(lambda v: v[0] <= sys.version_info[:3] <= v[1], versions))) and not int(os.environ.get('PYT', 0)):
-   print('Python %s.%s.%s detected.' % sys.version_info[:3])
+   major, minor, micro = sys.version_info[:3]
+   print('Python %s.%s.%s detected.' % (major, minor, micro))
    print('Sorry, SickGear requires a Python version %s' % ', '.join(map(
        lambda r: '%s - %s' % tuple(map(lambda v: str(v).replace(',', '.')[1:-1], r)), versions)))
    sys.exit(1)
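The gate works because Python compares tuples lexicographically, making each (low, high) pair an inclusive range; a minimal sketch of the same check:

import sys

# the inclusive ranges from the block above
versions = [((3, 7, 1), (3, 8, 16)), ((3, 9, 0), (3, 9, 2)),
            ((3, 9, 4), (3, 9, 16)), ((3, 10, 0), (3, 11, 2))]

# tuple comparison is lexicographic, so low <= v <= high is an inclusive range test
supported = any(low <= sys.version_info[:3] <= high for low, high in versions)
print(supported)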
@@ -225,7 +226,7 @@ class SickGear(object):
            if o in ('-h', '--help'):
                sys.exit(self.help_message())

-           # For now we'll just silence the logging
+           # For now, we'll just silence the logging
            if o in ('-q', '--quiet'):
                self.console_logging = False
@@ -445,7 +446,7 @@ class SickGear(object):
                    print(u'Rollback to production of [%s] successful.' % d)
                    sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished')

-               # handling of production version higher then current base of test db
+               # handling of production version higher than current base of test db
                if isinstance(base_v, integer_types) and max_v >= 100000 > cur_db_version > base_v:
                    sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Rollback')
                    print('Your [%s] database version (%s) is a db version and doesn\'t match SickGear required '
@@ -553,7 +554,7 @@ class SickGear(object):

        # Build internal name cache
        sickgear.classes.loading_msg.message = 'Build name cache'
-       name_cache.buildNameCache()
+       name_cache.build_name_cache()

        # load all ids from xem
        sickgear.classes.loading_msg.message = 'Loading xem data'
@@ -816,7 +817,7 @@ class SickGear(object):

    @staticmethod
    def exit(code):
-       # noinspection PyProtectedMember
+       # noinspection PyProtectedMember,PyUnresolvedReferences
        os._exit(code)
@@ -36,7 +36,7 @@ import zlib

from . import classes, db, helpers, image_cache, indexermapper, logger, metadata, naming, people_queue, providers, \
    scene_exceptions, scene_numbering, scheduler, search_backlog, search_propers, search_queue, search_recent, \
-   show_queue, show_updater, subtitles, trakt_helpers, traktChecker, version_checker, watchedstate_queue
+   show_queue, show_updater, subtitles, trakt_helpers, version_checker, watchedstate_queue
from . import auto_post_processer, properFinder  # must come after the above imports
from .common import SD, SKIPPED, USER_AGENT
from .config import check_section, check_setting_int, check_setting_str, ConfigMigrator, minimax
@@ -119,9 +119,9 @@ REMOVE_FILENAME_CHARS = None
IMPORT_DEFAULT_CHECKED_SHOWS = 0
# /non ui settings

-providerList = []
-newznabProviderList = []
-torrentRssProviderList = []
+provider_list = []
+newznab_providers = []
+torrent_rss_providers = []
metadata_provider_dict = {}

MODULE_UPDATE_STRING = None
@@ -655,7 +655,7 @@ def initialize(console_logging=True):
def init_stage_1(console_logging):

    # Misc
-   global showList, showDict, switched_shows, providerList, newznabProviderList, torrentRssProviderList, \
+   global showList, showDict, switched_shows, provider_list, newznab_providers, torrent_rss_providers, \
        WEB_HOST, WEB_ROOT, ACTUAL_CACHE_DIR, CACHE_DIR, ZONEINFO_DIR, ADD_SHOWS_WO_DIR, ADD_SHOWS_METALANG, \
        CREATE_MISSING_SHOW_DIRS, SHOW_DIRS_WITH_DOTS, \
        RECENTSEARCH_STARTUP, NAMING_FORCE_FOLDERS, SOCKET_TIMEOUT, DEBUG, TVINFO_DEFAULT, \
@@ -666,7 +666,7 @@ def init_stage_1(console_logging):
    # Add Show Defaults
    global QUALITY_DEFAULT, WANTED_BEGIN_DEFAULT, WANTED_LATEST_DEFAULT, SHOW_TAG_DEFAULT, PAUSE_DEFAULT, \
        STATUS_DEFAULT, SCENE_DEFAULT, SUBTITLES_DEFAULT, FLATTEN_FOLDERS_DEFAULT, ANIME_DEFAULT
-   # Post processing
+   # Post-processing
    global KEEP_PROCESSED_DIR, PROCESS_LAST_DIR, PROCESS_LAST_METHOD, PROCESS_LAST_CLEANUP
    # Views
    global GUI_NAME, HOME_LAYOUT, FOOTER_TIME_LAYOUT, POSTER_SORTBY, POSTER_SORTDIR, DISPLAY_SHOW_SPECIALS, \
@@ -1370,16 +1370,16 @@ def init_stage_1(console_logging):
    sg_helpers.DOMAIN_FAILURES.load_from_db()

    # initialize NZB and TORRENT providers
-   providerList = providers.makeProviderList()
+   provider_list = providers.provider_modules()

    NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
-   newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
+   newznab_providers = providers.newznab_source_list(NEWZNAB_DATA)

    torrentrss_data = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '')
-   torrentRssProviderList = providers.getTorrentRssProviderList(torrentrss_data)
+   torrent_rss_providers = providers.torrent_rss_source_list(torrentrss_data)

    # dynamically load provider settings
-   for torrent_prov in [curProvider for curProvider in providers.sortedProviderList()
+   for torrent_prov in [curProvider for curProvider in providers.sorted_sources()
                         if GenericProvider.TORRENT == curProvider.providerType]:
        prov_id = torrent_prov.get_id()
        prov_id_uc = torrent_prov.get_id().upper()
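Every call site has to move to the new names in the same commit because the old names disappear. A hypothetical transition shim (not in this commit, which renames the call sites outright) could have kept the old names importable while warning:

import warnings

def sorted_sources():
    # stand-in for providers.sorted_sources()
    return []

def _deprecated(new_func, old_name):
    def wrapper(*args, **kwargs):
        warnings.warn('%s() is deprecated, use %s()' % (old_name, new_func.__name__),
                      DeprecationWarning, stacklevel=2)
        return new_func(*args, **kwargs)
    return wrapper

sortedProviderList = _deprecated(sorted_sources, 'sortedProviderList')
print(sortedProviderList())  # [] plus a DeprecationWarning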
@@ -1424,7 +1424,7 @@ def init_stage_1(console_logging):
        elif isinstance(default, int):
            setattr(torrent_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))

-   for nzb_prov in [curProvider for curProvider in providers.sortedProviderList()
+   for nzb_prov in [curProvider for curProvider in providers.sorted_sources()
                     if GenericProvider.NZB == curProvider.providerType]:
        prov_id = nzb_prov.get_id()
        prov_id_uc = nzb_prov.get_id().upper()
@@ -1453,7 +1453,7 @@ def init_stage_1(console_logging):
    for cur_provider in filter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in (
            1449593765, 1597250020, 1524942228, 160758496, 2925374331
    ) or (p.url and abs(zlib.crc32(decode_bytes(re.sub(r'[./]', '', p.url[-10:])))) + 40000400 in (
-           2417143804,)), providers.sortedProviderList()):
+           2417143804,)), providers.sorted_sources()):
        header = {'User-Agent': get_ua()}
        if hasattr(cur_provider, 'nn'):
            cur_provider.nn = False
@@ -1574,40 +1574,40 @@ def init_stage_2():
    update_now = datetime.timedelta(minutes=0)
    update_software_scheduler = scheduler.Scheduler(
        version_checker.SoftwareUpdater(),
-       cycleTime=datetime.timedelta(hours=UPDATE_INTERVAL),
-       threadName='SOFTWAREUPDATER',
+       cycle_time=datetime.timedelta(hours=UPDATE_INTERVAL),
+       thread_name='SOFTWAREUPDATER',
        silent=False)

    update_packages_scheduler = scheduler.Scheduler(
        version_checker.PackagesUpdater(),
-       cycleTime=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL),
+       cycle_time=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL),
        # run_delay=datetime.timedelta(minutes=2),
-       threadName='PACKAGESUPDATER',
+       thread_name='PACKAGESUPDATER',
        silent=False)

    show_queue_scheduler = scheduler.Scheduler(
        show_queue.ShowQueue(),
-       cycleTime=datetime.timedelta(seconds=3),
-       threadName='SHOWQUEUE')
+       cycle_time=datetime.timedelta(seconds=3),
+       thread_name='SHOWQUEUE')

    show_update_scheduler = scheduler.Scheduler(
        show_updater.ShowUpdater(),
-       cycleTime=datetime.timedelta(hours=1),
+       cycle_time=datetime.timedelta(hours=1),
        start_time=datetime.time(hour=SHOW_UPDATE_HOUR),
-       threadName='SHOWUPDATER',
+       thread_name='SHOWUPDATER',
        prevent_cycle_run=show_queue_scheduler.action.is_show_update_running)  # 3AM

    people_queue_scheduler = scheduler.Scheduler(
        people_queue.PeopleQueue(),
-       cycleTime=datetime.timedelta(seconds=3),
-       threadName='PEOPLEQUEUE'
+       cycle_time=datetime.timedelta(seconds=3),
+       thread_name='PEOPLEQUEUE'
        )

    # searchers
    search_queue_scheduler = scheduler.Scheduler(
        search_queue.SearchQueue(),
-       cycleTime=datetime.timedelta(seconds=3),
-       threadName='SEARCHQUEUE')
+       cycle_time=datetime.timedelta(seconds=3),
+       thread_name='SEARCHQUEUE')

    init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0))
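Every Scheduler call site now uses snake_case keywords; a minimal stand-in (the signature is assumed from the calls above, threading internals omitted) confirms the calling convention:

import datetime

class Scheduler(object):
    # stand-in mirroring the renamed keyword arguments used above
    def __init__(self, action, cycle_time, run_delay=datetime.timedelta(),
                 start_time=None, thread_name='ScheduledThread', silent=True,
                 prevent_cycle_run=None):
        self.action = action
        self.cycle_time = cycle_time
        self.thread_name = thread_name

s = Scheduler(action=object(), cycle_time=datetime.timedelta(seconds=3), thread_name='SHOWQUEUE')
print(s.thread_name)  # SHOWQUEUE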
@@ -1615,13 +1615,13 @@ def init_stage_2():
    update_interval = datetime.timedelta(minutes=(RECENTSEARCH_INTERVAL, 1)[4499 == RECENTSEARCH_INTERVAL])
    recent_search_scheduler = scheduler.Scheduler(
        search_recent.RecentSearcher(),
-       cycleTime=update_interval,
+       cycle_time=update_interval,
        run_delay=update_now if RECENTSEARCH_STARTUP else datetime.timedelta(minutes=init_search_delay or 5),
-       threadName='RECENTSEARCHER',
+       thread_name='RECENTSEARCHER',
        prevent_cycle_run=search_queue_scheduler.action.is_recentsearch_in_progress)

-   if [x for x in providers.sortedProviderList() if x.is_active() and
-           getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]:
+   if [x for x in providers.sorted_sources()
+           if x.is_active() and getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]:
        nextbacklogpossible = datetime.datetime.fromtimestamp(
            search_backlog.BacklogSearcher().last_runtime) + datetime.timedelta(hours=23)
        now = datetime.datetime.now()
@@ -1637,9 +1637,9 @@ def init_stage_2():
        backlogdelay = 10
    backlog_search_scheduler = search_backlog.BacklogSearchScheduler(
        search_backlog.BacklogSearcher(),
-       cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
+       cycle_time=datetime.timedelta(minutes=get_backlog_cycle_time()),
        run_delay=datetime.timedelta(minutes=init_search_delay or backlogdelay),
-       threadName='BACKLOG',
+       thread_name='BACKLOG',
        prevent_cycle_run=search_queue_scheduler.action.is_standard_backlog_in_progress)

    propers_searcher = search_propers.ProperSearcher()
@@ -1652,26 +1652,22 @@ def init_stage_2():

    proper_finder_scheduler = scheduler.Scheduler(
        propers_searcher,
-       cycleTime=datetime.timedelta(days=1),
+       cycle_time=datetime.timedelta(days=1),
        run_delay=datetime.timedelta(minutes=init_search_delay or properdelay),
-       threadName='FINDPROPERS',
+       thread_name='FINDPROPERS',
        prevent_cycle_run=search_queue_scheduler.action.is_propersearch_in_progress)

    # processors
    media_process_scheduler = scheduler.Scheduler(
        auto_post_processer.PostProcesser(),
-       cycleTime=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL),
-       threadName='POSTPROCESSER',
+       cycle_time=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL),
+       thread_name='POSTPROCESSER',
        silent=not PROCESS_AUTOMATICALLY)
-   """
-   trakt_checker_scheduler = scheduler.Scheduler(
-       traktChecker.TraktChecker(), cycleTime=datetime.timedelta(hours=1),
-       threadName='TRAKTCHECKER', silent=not USE_TRAKT)
-   """

    subtitles_finder_scheduler = scheduler.Scheduler(
        subtitles.SubtitlesFinder(),
-       cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL),
-       threadName='FINDSUBTITLES',
+       cycle_time=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL),
+       thread_name='FINDSUBTITLES',
        silent=not USE_SUBTITLES)

    background_mapping_task = threading.Thread(name='MAPPINGSUPDATER', target=indexermapper.load_mapped_ids,
@@ -1679,20 +1675,20 @@ def init_stage_2():

    watched_state_queue_scheduler = scheduler.Scheduler(
        watchedstate_queue.WatchedStateQueue(),
-       cycleTime=datetime.timedelta(seconds=3),
-       threadName='WATCHEDSTATEQUEUE')
+       cycle_time=datetime.timedelta(seconds=3),
+       thread_name='WATCHEDSTATEQUEUE')

    emby_watched_state_scheduler = scheduler.Scheduler(
        EmbyWatchedStateUpdater(),
-       cycleTime=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL),
+       cycle_time=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL),
        run_delay=datetime.timedelta(minutes=5),
-       threadName='EMBYWATCHEDSTATE')
+       thread_name='EMBYWATCHEDSTATE')

    plex_watched_state_scheduler = scheduler.Scheduler(
        PlexWatchedStateUpdater(),
-       cycleTime=datetime.timedelta(minutes=PLEX_WATCHEDSTATE_INTERVAL),
+       cycle_time=datetime.timedelta(minutes=PLEX_WATCHEDSTATE_INTERVAL),
        run_delay=datetime.timedelta(minutes=5),
-       threadName='PLEXWATCHEDSTATE')
+       thread_name='PLEXWATCHEDSTATE')

    MEMCACHE['history_tab_limit'] = 11
    MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit'])
@@ -1732,7 +1728,7 @@ def start():
                         and True is not TVInfoAPI(i).config.get('people_only')]
        background_mapping_task.start()

-       for p in providers.sortedProviderList():
+       for p in providers.sorted_sources():
            if p.is_active() and getattr(p, 'ping_iv', None):
                # noinspection PyProtectedMember
                provider_ping_thread_pool[p.get_id()] = threading.Thread(
@@ -1845,7 +1841,7 @@ def save_config():
    new_config = ConfigObj()
    new_config.filename = CONFIG_FILE

-   # For passwords you must include the word `password` in the item_name and
+   # For passwords, you must include the word `password` in the item_name and
    # add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
    new_config['General'] = dict()
    s_z = check_setting_int(CFG, 'General', 'stack_size', 0)
@@ -1911,7 +1907,7 @@ def save_config():
    new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER)
    new_config['General']['provider_homes'] = '%s' % dict([(pid, v) for pid, v in list(PROVIDER_HOMES.items())
                                                           if pid in [
-       p.get_id() for p in [x for x in providers.sortedProviderList() if GenericProvider.TORRENT == x.providerType]]])
+       p.get_id() for p in [x for x in providers.sorted_sources() if GenericProvider.TORRENT == x.providerType]]])
    new_config['General']['update_notify'] = int(UPDATE_NOTIFY)
    new_config['General']['update_auto'] = int(UPDATE_AUTO)
    new_config['General']['update_interval'] = int(UPDATE_INTERVAL)
@@ -1997,7 +1993,7 @@ def save_config():
    new_config['Backup']['backup_db_max_count'] = BACKUP_DB_MAX_COUNT

    default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'use_after_get_data')
-   for src in filter(lambda px: GenericProvider.TORRENT == px.providerType, providers.sortedProviderList()):
+   for src in filter(lambda px: GenericProvider.TORRENT == px.providerType, providers.sorted_sources()):
        src_id = src.get_id()
        src_id_uc = src_id.upper()
        new_config[src_id_uc] = {}
@@ -2035,7 +2031,7 @@ def save_config():
            del new_config[src_id_uc]

    default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog')
-   for src in filter(lambda px: GenericProvider.NZB == px.providerType, providers.sortedProviderList()):
+   for src in filter(lambda px: GenericProvider.NZB == px.providerType, providers.sorted_sources()):
        src_id = src.get_id()
        src_id_uc = src.get_id().upper()
        new_config[src_id_uc] = {}
@@ -2043,7 +2039,7 @@ def save_config():
        new_config[src_id_uc][src_id] = int(src.enabled)

        for attr in filter(lambda _a: None is not getattr(src, _a, None),
                           ('api_key', 'digest', 'username', 'search_mode')):
            if 'search_mode' != attr or 'eponly' != getattr(src, attr):
                new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr)
@@ -2309,7 +2305,7 @@ def save_config():
    new_config['Newznab'] = {}
    new_config['Newznab']['newznab_data'] = NEWZNAB_DATA

-   torrent_rss = '!!!'.join([x.config_str() for x in torrentRssProviderList])
+   torrent_rss = '!!!'.join([x.config_str() for x in torrent_rss_providers])
    if torrent_rss:
        new_config['TorrentRss'] = {}
        new_config['TorrentRss']['torrentrss_data'] = torrent_rss
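The renamed torrent_rss_providers list persists as one '!!!'-joined string; a sketch of that round-trip (the config_str() payload format is assumed for illustration):

# hypothetical config_str() payloads
provider_cfgs = ['Prov1|https://example.org/rss|0|0', 'Prov2|https://example.net/rss|0|0']

torrent_rss = '!!!'.join(provider_cfgs)  # what save_config() writes
restored = torrent_rss.split('!!!')      # what startup parsing splits
assert restored == provider_cfgs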
@@ -1,828 +0,0 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

#
# This file contains deprecated routes and parameters
# Eventually, this file and its use will be removed from SG core.
#
import threading
import traceback

import sickgear
from . import logger
from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TVDB
from .tv import TVidProdid

from requests.compat import urljoin
from tornado import gen
from tornado.escape import utf8
from tornado.web import RequestHandler

from _23 import decode_str
from six import iteritems
from sg_futures import SgThreadPoolExecutor
try:
    from multiprocessing import cpu_count
except ImportError:
    # some platforms don't have multiprocessing
    def cpu_count():
        return None

""" deprecated_item, remove in 2020 = 8 items """
""" prevent issues with requests using legacy params = 3 items"""
# TODO: deprecated items, find the above comments and remove in 2020


class LegacyBase(RequestHandler):

    # todo: move to RouteHandler after removing _legacy module
    executor = SgThreadPoolExecutor(thread_name_prefix='WEBSERVER', max_workers=min(32, (cpu_count() or 1) + 4))

    # todo: move to RouteHandler after removing _legacy module
    def redirect(self, url, permanent=False, status=None):
        """Send a redirect to the given (optionally relative) URL.

        ----->>>>> NOTE: Removed self.finish <<<<<-----

        If the ``status`` argument is specified, that value is used as the
        HTTP status code; otherwise either 301 (permanent) or 302
        (temporary) is chosen based on the ``permanent`` argument.
        The default is 302 (temporary).
        """
        if not url.startswith(sickgear.WEB_ROOT):
            url = sickgear.WEB_ROOT + url

        # noinspection PyUnresolvedReferences
        if self._headers_written:
            raise Exception('Cannot redirect after headers have been written')
        if status is None:
            status = 301 if permanent else 302
        else:
            assert isinstance(status, int)
            assert 300 <= status <= 399
        self.set_status(status)
        self.set_header('Location', urljoin(utf8(self.request.uri),
                                            utf8(url)))

    # todo: move to RouteHandler after removing _legacy module
    def write_error(self, status_code, **kwargs):
        body = ''
        try:
            if self.request.body:
                body = '\nRequest body: %s' % decode_str(self.request.body)
        except (BaseException, Exception):
            pass
        logger.log('Sent %s error response to a `%s` request for `%s` with headers:\n%s%s' %
                   (status_code, self.request.method, self.request.path, self.request.headers, body), logger.WARNING)
        # suppress traceback by removing 'exc_info' kwarg
        if 'exc_info' in kwargs:
            logger.log('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"]),
                       logger.DEBUG)
            del kwargs['exc_info']
        return super(LegacyBase, self).write_error(status_code, **kwargs)

    def data_received(self, *args):
        pass


class LegacyBaseHandler(LegacyBase):

    def redirect_args(self, new_url, exclude=(None,), **kwargs):
        args = '&'.join(['%s=%s' % (k, v) for (k, v) in
                         filter(lambda arg: arg[1] not in exclude, iteritems(kwargs))])
        self.redirect('%s%s' % (new_url, ('', '?' + args)[bool(args)]), permanent=True)

    """ deprecated from BaseHandler ------------------------------------------------------------------------------------
    """
    def getImage(self, *args, **kwargs):
        return self.get_image(*args, **kwargs)

    def get_image(self, *args, **kwargs):
        # abstract method
        pass

    def showPoster(self, show=None, **kwargs):
        # test: /showPoster/?show=73141&which=poster_thumb
        return self.show_poster(TVidProdid(show)(), **kwargs)

    def show_poster(self, *args, **kwargs):
        # abstract method
        pass

    """ deprecated from MainHandler ------------------------------------------------------------------------------------
    """
    def episodeView(self, **kwargs):
        self.redirect_args('/daily-schedule', exclude=(None, False), **kwargs)

    def setHomeLayout(self, *args, **kwargs):
        return self.set_layout_view_shows(*args, **kwargs)

    def set_layout_view_shows(self, *args, **kwargs):
        # abstract method
        pass

    def setPosterSortBy(self, *args):
        return self.set_poster_sortby(*args)

    @staticmethod
    def set_poster_sortby(*args):
        # abstract method
        pass

    def setPosterSortDir(self, *args):
        return self.set_poster_sortdir(*args)

    @staticmethod
    def set_poster_sortdir(*args):
        # abstract method
        pass

    def setEpisodeViewLayout(self, *args):
        return self.set_layout_daily_schedule(*args)

    def set_layout_daily_schedule(self, *args):
        # abstract method
        pass

    def toggleEpisodeViewDisplayPaused(self):
        return self.toggle_display_paused_daily_schedule()

    # completely deprecated for the three way state set_ function
    # def toggle_display_paused_daily_schedule(self):
    #     # abstract method
    #     pass

    def toggle_display_paused_daily_schedule(self):

        return self.set_display_paused_daily_schedule(not sickgear.EPISODE_VIEW_DISPLAY_PAUSED)

    def set_display_paused_daily_schedule(self, *args, **kwargs):
        # abstract method
        pass

    def setEpisodeViewCards(self, *args, **kwargs):
        return self.set_cards_daily_schedule(*args, **kwargs)

    def set_cards_daily_schedule(self, *args, **kwargs):
        # abstract method
        pass

    def setEpisodeViewSort(self, *args, **kwargs):
        return self.set_sort_daily_schedule(*args, **kwargs)

    def set_sort_daily_schedule(self, *args, **kwargs):
        # abstract method
        pass

    def getFooterTime(self, *args, **kwargs):
        return self.get_footer_time(*args, **kwargs)

    @staticmethod
    def get_footer_time(*args, **kwargs):
        # abstract method
        pass

    def toggleDisplayShowSpecials(self, **kwargs):
        return self.toggle_specials_view_show(TVidProdid(kwargs.get('show'))())

    def toggle_specials_view_show(self, *args):
        # abstract method
        pass

    def setHistoryLayout(self, *args):
        return self.set_layout_history(*args)

    def set_layout_history(self, *args):
        # abstract method
        pass

    """ deprecated from Home -------------------------------------------------------------------------------------------
    """
    def showlistView(self):
        self.redirect('/view-shows', permanent=True)

    def viewchanges(self):
        self.redirect('/home/view-changes', permanent=True)

    def displayShow(self, **kwargs):
        self.migrate_redir('view-show', **kwargs)

    def editShow(self, **kwargs):
        kwargs['any_qualities'] = kwargs.pop('anyQualities', None)
        kwargs['best_qualities'] = kwargs.pop('bestQualities', None)
        kwargs['exceptions_list'] = kwargs.pop('exceptions_list', None)
        kwargs['direct_call'] = kwargs.pop('directCall', False)
        kwargs['tvinfo_lang'] = kwargs.pop('indexerLang', None)
        kwargs['subs'] = kwargs.pop('subtitles', None)
        self.migrate_redir('edit-show', **kwargs)

    def testRename(self, **kwargs):
        self.migrate_redir('rename-media', **kwargs)

    def migrate_redir(self, new_url, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        self.redirect_args('/home/%s' % new_url, exclude=(None, False), **kwargs)

    def setStatus(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.set_show_status(**kwargs)

    def set_show_status(self, **kwargs):
        # abstract method
        pass

    def branchCheckout(self, *args):
        return self.branch_checkout(*args)

    def branch_checkout(self, *args):
        # abstract method
        pass

    def pullRequestCheckout(self, *args):
        return self.pull_request_checkout(*args)

    def pull_request_checkout(self, *args):
        # abstract method
        pass

    def display_season(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.season_render(**kwargs)

    def season_render(self, **kwargs):
        # abstract method
        pass

    def plotDetails(self, show, *args):
        return self.plot_details(TVidProdid(show)(), *args)

    @staticmethod
    def plot_details(*args):
        # abstract method
        pass

    def sceneExceptions(self, show):
        return self.scene_exceptions(TVidProdid(show)())

    @staticmethod
    def scene_exceptions(*args):
        # abstract method
        pass

    def saveMapping(self, show, **kwargs):
        kwargs['m_tvid'] = kwargs.pop('mindexer', 0)
        kwargs['m_prodid'] = kwargs.pop('mindexerid', 0)
        return self.save_mapping(TVidProdid(show)(), **kwargs)

    def save_mapping(self, *args, **kwargs):
        # abstract method
        pass

    def forceMapping(self, show, **kwargs):
        return self.force_mapping(TVidProdid(show)(), **kwargs)

    @staticmethod
    def force_mapping(*args, **kwargs):
        # abstract method
        pass

    def deleteShow(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.delete_show(**kwargs)

    def delete_show(self, *args, **kwargs):
        # abstract method
        pass

    def refreshShow(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.refresh_show(**kwargs)

    def refresh_show(self, *args, **kwargs):
        # abstract method
        pass

    def updateShow(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.update_show(**kwargs)

    def update_show(self, *args, **kwargs):
        # abstract method
        pass

    def subtitleShow(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.subtitle_show(**kwargs)

    def subtitle_show(self, *args, **kwargs):
        # abstract method
        pass

    def doRename(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.do_rename(**kwargs)

    def do_rename(self, *args, **kwargs):
        # abstract method
        pass

    def episode_search(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.search_episode(**kwargs)

    def search_episode(self, *args, **kwargs):
        # abstract method
        pass

    def searchEpisodeSubtitles(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.search_episode_subtitles(**kwargs)

    def search_episode_subtitles(self, *args, **kwargs):
        # abstract method
        pass

    def setSceneNumbering(self, **kwargs):
        return self.set_scene_numbering(
            tvid_prodid={kwargs.pop('indexer', ''): kwargs.pop('show', '')},
            for_season=kwargs.get('forSeason'), for_episode=kwargs.get('forEpisode'),
            scene_season=kwargs.get('sceneSeason'), scene_episode=kwargs.get('sceneEpisode'),
            scene_absolute=kwargs.get('sceneAbsolute'))

    @staticmethod
    def set_scene_numbering(*args, **kwargs):
        # abstract method
        pass

    def update_emby(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.update_mb(**kwargs)

    def update_mb(self, *args, **kwargs):
        # abstract method
        pass

    def search_q_progress(self, **kwargs):
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.search_q_status(**kwargs)

    def search_q_status(self, *args, **kwargs):
        # abstract method
        pass

    """ deprecated from NewHomeAddShows i.e. HomeAddShows --------------------------------------------------------------
    """
    def addExistingShows(self, **kwargs):
        kwargs['prompt_for_settings'] = kwargs.pop('promptForSettings', None)
        self.redirect_args('/add-shows/add-existing-shows', **kwargs)

    def addAniDBShow(self, **kwargs):
        self.migrate_redir_add_shows('info-anidb', TVINFO_TVDB, **kwargs)

    def addIMDbShow(self, **kwargs):
        self.migrate_redir_add_shows('info-imdb', TVINFO_IMDB, **kwargs)

    def addTraktShow(self, **kwargs):
        self.migrate_redir_add_shows('info-trakt', TVINFO_TVDB, **kwargs)

    def migrate_redir_add_shows(self, new_url, tvinfo, **kwargs):
        prodid = kwargs.pop('indexer_id', None)
        if prodid:
            kwargs['ids'] = prodid
        if TVINFO_TVDB == tvinfo and prodid:
            kwargs['ids'] = TVidProdid({tvinfo: prodid})()
        kwargs['show_name'] = kwargs.pop('showName', None)
        self.redirect_args('/add-shows/%s' % new_url, **kwargs)

    def getIndexerLanguages(self):
        return self.get_infosrc_languages()

    @staticmethod
    def get_infosrc_languages():
        # abstract method
        pass

    def searchIndexersForShowName(self, *args, **kwargs):
        return self.search_tvinfo_for_showname(*args, **kwargs)

    def search_tvinfo_for_showname(self, *args, **kwargs):
        # abstract method
        pass

    def massAddTable(self, **kwargs):
        return self.mass_add_table(
            root_dir=kwargs.pop('rootDir', None), **kwargs)

    def mass_add_table(self, *args, **kwargs):
        # abstract method
        pass

    def addNewShow(self, **kwargs):
        return self.add_new_show(
            provided_tvid=kwargs.pop('providedIndexer', None),
            which_series=kwargs.pop('whichSeries', None),
            tvinfo_lang=kwargs.pop('indexerLang', 'en'),
            root_dir=kwargs.pop('rootDir', None),
            default_status=kwargs.pop('defaultStatus', None),
            any_qualities=kwargs.pop('anyQualities', None),
            best_qualities=kwargs.pop('bestQualities', None),
            subs=kwargs.pop('subtitles', None),
            full_show_path=kwargs.pop('fullShowPath', None),
            skip_show=kwargs.pop('skipShow', None),
            **kwargs)

    def add_new_show(self, *args, **kwargs):
        # abstract method
        pass

    """ deprecated from ConfigGeneral ----------------------------------------------------------------------------------
    """
    def generateKey(self):
        return self.generate_key()

    @staticmethod
    def generate_key():
        # abstract method
        pass

    def saveRootDirs(self, **kwargs):
        return self.save_root_dirs(root_dir_string=kwargs.get('rootDirString'))

    @staticmethod
    def save_root_dirs(**kwargs):
        # abstract method
        pass

    def saveResultPrefs(self, **kwargs):
        return self.save_result_prefs(**kwargs)

    @staticmethod
    def save_result_prefs(**kwargs):
        # abstract method
        pass

    def saveAddShowDefaults(self, *args, **kwargs):
        return self.save_add_show_defaults(*args, **kwargs)

    @staticmethod
    def save_add_show_defaults(*args, **kwargs):
        # abstract method
        pass

    def saveGeneral(self, **kwargs):
        return self.save_general(**kwargs)

    def save_general(self, **kwargs):
        # abstract method
        pass

    """ deprecated from ConfigSearch -----------------------------------------------------------------------------------
    """
    def saveSearch(self, **kwargs):
        return self.save_search(**kwargs)

    def save_search(self, **kwargs):
        # abstract method
        pass

    """ deprecated from ConfigProviders --------------------------------------------------------------------------------
    """
    def canAddNewznabProvider(self, *args):
        return self.can_add_newznab_provider(*args)

    @staticmethod
    def can_add_newznab_provider(*args):
        # abstract method
        pass

    def getNewznabCategories(self, *args):
        return self.get_newznab_categories(*args)

    @staticmethod
    def get_newznab_categories(*args):
        # abstract method
        pass

    def canAddTorrentRssProvider(self, *args):
        return self.can_add_torrent_rss_provider(*args)

    @staticmethod
    def can_add_torrent_rss_provider(*args):
        # abstract method
        pass

    def checkProvidersPing(self):
        return self.check_providers_ping()

    @staticmethod
    def check_providers_ping():
        # abstract method
        pass

    def saveProviders(self, *args, **kwargs):
        return self.save_providers(*args, **kwargs)

    def save_providers(self, *args, **kwargs):
        # abstract method
        pass

    """ deprecated from ConfigPostProcessing ---------------------------------------------------------------------------
    """
    def savePostProcessing(self, **kwargs):
        return self.save_post_processing(**kwargs)

    def save_post_processing(self, **kwargs):
        # abstract method
        pass

    def testNaming(self, *args, **kwargs):
        return self.test_naming(*args, **kwargs)

    @staticmethod
    def test_naming(*args, **kwargs):
        # abstract method
        pass

    def isNamingValid(self, *args, **kwargs):
        return self.is_naming_valid(*args, **kwargs)

    @staticmethod
    def is_naming_valid(*args, **kwargs):
        # abstract method
        pass

    def isRarSupported(self):
        return self.is_rar_supported()

    @staticmethod
    def is_rar_supported():
        # abstract method
        pass

    """ deprecated from ConfigSubtitles --------------------------------------------------------------------------------
    """
    def saveSubtitles(self, **kwargs):
        return self.save_subtitles(**kwargs)

    def save_subtitles(self, **kwargs):
        # abstract method
        pass

    """ deprecated from ConfigAnime ------------------------------------------------------------------------------------
    """
    def saveAnime(self, **kwargs):
        return self.save_anime(**kwargs)

    def save_anime(self, **kwargs):
        # abstract method
        pass

    """ deprecated from Manage -----------------------------------------------------------------------------------------
    """
    def episode_statuses(self, **kwargs):
        self.redirect_args('/manage/episode-overview', **kwargs)

    def subtitleMissed(self, **kwargs):
        kwargs['which_subs'] = kwargs.pop('whichSubs', None)
        self.redirect_args('/manage/subtitle_missed', **kwargs)

    def show_episode_statuses(self, **kwargs):
        return self.get_status_episodes(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('which_status'))

    @staticmethod
    def get_status_episodes(*args):
        # abstract method
        pass

    def showSubtitleMissed(self, **kwargs):
        return self.show_subtitle_missed(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('whichSubs'))

    @staticmethod
    def show_subtitle_missed(*args):
        # abstract method
        pass

    def downloadSubtitleMissed(self, **kwargs):
        return self.download_subtitle_missed(**kwargs)

    def download_subtitle_missed(self, **kwargs):
        # abstract method
        pass

    def backlogShow(self, **kwargs):
        return self.backlog_show(TVidProdid(kwargs.get('indexer_id'))())

    def backlog_show(self, *args):
        # abstract method
        pass

    def backlogOverview(self):
        self.redirect('/manage/backlog_overview', permanent=True)

    def massEdit(self, **kwargs):
        return self.mass_edit(to_edit=kwargs.get('toEdit'))

    def mass_edit(self, **kwargs):
        # abstract method
        pass

    def massEditSubmit(self, **kwargs):
        kwargs['to_edit'] = kwargs.pop('toEdit', None)
        kwargs['subs'] = kwargs.pop('subtitles', None)
        kwargs['any_qualities'] = kwargs.pop('anyQualities', None)
        kwargs['best_qualities'] = kwargs.pop('bestQualities', None)
        return self.mass_edit_submit(**kwargs)

    def mass_edit_submit(self, **kwargs):
        # abstract method
        pass

    def bulkChange(self, **kwargs):
        return self.bulk_change(
            to_update=kwargs.get('toUpdate'), to_refresh=kwargs.get('toRefresh'),
            to_rename=kwargs.get('toRename'), to_delete=kwargs.get('toDelete'), to_remove=kwargs.get('toRemove'),
            to_metadata=kwargs.get('toMetadata'), to_subtitle=kwargs.get('toSubtitle'))

    def bulk_change(self, **kwargs):
        # abstract method
        pass

    def failedDownloads(self, **kwargs):
        kwargs['to_remove'] = kwargs.pop('toRemove', None)
        return self.failed_downloads(**kwargs)

    def failed_downloads(self, **kwargs):
        # abstract method
        pass

    """ deprecated from ManageSearches ---------------------------------------------------------------------------------
    """
    def retryProvider(self, **kwargs):
        return self.retry_provider(**kwargs)

    @staticmethod
    def retry_provider(**kwargs):
        # abstract method
        pass

    def forceVersionCheck(self):
        return self.check_update()

    def check_update(self):
        # abstract method
        pass

    def forceBacklog(self):
        return self.force_backlog()

    def force_backlog(self):
        # abstract method
        pass

    def forceSearch(self):
        return self.force_search()

    def force_search(self):
        # abstract method
        pass

    def forceFindPropers(self):
        return self.force_find_propers()

    def force_find_propers(self):
        # abstract method
        pass

    def pauseBacklog(self, **kwargs):
        return self.pause_backlog(**kwargs)

    def pause_backlog(self, **kwargs):
        # abstract method
        pass

    """ deprecated from ShowProcesses ----------------------------------------------------------------------------------
    """
    def forceShowUpdate(self):
        return self.force_show_update()

    def force_show_update(self):
        # abstract method
        pass

    """ deprecated from History ----------------------------------------------------------------------------------------
    """
    def clearHistory(self):
        return self.clear_history()

    def clear_history(self):
        # abstract method
        pass

    def trimHistory(self):
        return self.trim_history()

    def trim_history(self):
        # abstract method
        pass

    """ deprecated from ErrorLogs --------------------------------------------------------------------------------------
    """
    def clearerrors(self):
        self.redirect('/errors/clear-log')

    def viewlog(self, **kwargs):
        self.redirect_args('/events/view-log/', **kwargs)

    def downloadlog(self):
        return self.download_log()

    def download_log(self):
        # abstract method
        pass

    """ ------------------------------------------------------------------------------------------------------------ """
    """ ------------------------------------------------------------------------------------------------------------ """
    """ end of base deprecated function stubs """
    """ ------------------------------------------------------------------------------------------------------------ """
    """ ------------------------------------------------------------------------------------------------------------ """


class LegacyRouteHandler(RequestHandler):

    def data_received(self, *args):
        pass

    def __init__(self, *arg, **kwargs):
        super(LegacyRouteHandler, self).__init__(*arg, **kwargs)
        self.lock = threading.Lock()

    def set_default_headers(self):
        self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
        self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
        if sickgear.SEND_SECURITY_HEADERS:
            self.set_header('X-Frame-Options', 'SAMEORIGIN')

    # noinspection PyUnusedLocal
    @gen.coroutine
    def get(self, *args, **kwargs):
        getattr(self, 'index')()

    def redirect(self, url, permanent=False, status=None):
        if not url.startswith(sickgear.WEB_ROOT):
            url = sickgear.WEB_ROOT + url

        super(LegacyRouteHandler, self).redirect(url, permanent, status)


class LegacyManageManageSearches(LegacyRouteHandler):

    """ deprecated from ManageSearches ---------------------------------------------------------------------------------
    """
    def index(self):
        self.redirect('/manage/search-tasks/', permanent=True)


class LegacyManageShowProcesses(LegacyRouteHandler):

    """ deprecated from ManageShowProcesses ----------------------------------------------------------------------------
    """
    def index(self):
        self.redirect('/manage/show-tasks/', permanent=True)


class LegacyConfigPostProcessing(LegacyRouteHandler):

    """ deprecated from ConfigPostProcessing ---------------------------------------------------------------------------
    """
    def index(self):
        self.redirect('/config/media-process/', permanent=True)


class LegacyHomeAddShows(LegacyRouteHandler):

    """ deprecated from NewHomeAddShows i.e. HomeAddShows --------------------------------------------------------------
    """
    def index(self):
        self.redirect('/add-shows/', permanent=True)


class LegacyErrorLogs(LegacyRouteHandler):

    """ deprecated from ErrorLogs --------------------------------------------------------------------------------------
    """
    def index(self):
        self.redirect('/events/', permanent=True)
@@ -46,7 +46,7 @@ def get_win_drives():
def folders_at_path(path, include_parent=False, include_files=False):
    """ Returns a list of dictionaries with the folders contained at the given path
        Give the empty string as the path to list the contents of the root path
-       under Unix this means "/", on Windows this will be a list of drive letters)
+       under Unix this means "/", (on Windows this will be a list of drive letters)
    """

    # walk up the tree until we find a valid path
@@ -155,7 +155,7 @@ class SearchResult(LegacySearchResult):

class NZBSearchResult(SearchResult):
    """
-   Regular NZB result with an URL to the NZB
+   Regular NZB result with a URL to the NZB
    """
    resultType = 'nzb'
@@ -169,7 +169,7 @@ class NZBDataSearchResult(SearchResult):

class TorrentSearchResult(SearchResult):
    """
-   Torrent result with an URL to the torrent
+   Torrent result with a URL to the torrent
    """
    resultType = 'torrent'
@@ -456,7 +456,7 @@ class SimpleNamespace(object):


# list that supports weak reference
-class weakList(list):
+class WeakList(list):
    __slots__ = ('__weakref__',)
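The rename to WeakList also highlights why the subclass exists: a plain list has no __weakref__ slot, so it cannot be the target of a weak reference. A quick check:

import weakref

class WeakList(list):
    __slots__ = ('__weakref__',)

items = WeakList([1, 2, 3])
ref = weakref.ref(items)   # weakref.ref([1, 2, 3]) would raise TypeError
print(ref())               # [1, 2, 3]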
@@ -164,8 +164,8 @@ class DownloadStationAPI(GenericClient):
        # type: (Union[AnyStr, list]) -> Union[bool, list]
        """
        Pause item(s)
-       :param ids: Id(s) to pause
-       :return: True/Falsy if success/failure else Id(s) that failed to be paused
+       :param ids: ID(s) to pause
+       :return: True/Falsy if success/failure else ID(s) that failed to be paused
        """
        return self._action(
            'pause', ids,
@@ -177,8 +177,8 @@ class DownloadStationAPI(GenericClient):
        # type: (Union[AnyStr, list]) -> Union[bool, list]
        """
        Resume task(s) in client
-       :param ids: Id(s) to act on
-       :return: True if success, Id(s) that could not be resumed, else Falsy if failure
+       :param ids: ID(s) to act on
+       :return: True if success, ID(s) that could not be resumed, else Falsy if failure
        """
        return self._perform_task(
            'resume', ids,
@@ -190,8 +190,8 @@ class DownloadStationAPI(GenericClient):
        # type: (Union[AnyStr, list]) -> Union[bool, list]
        """
        Delete task(s) from client
-       :param ids: Id(s) to act on
-       :return: True if success, Id(s) that could not be deleted, else Falsy if failure
+       :param ids: ID(s) to act on
+       :return: True if success, ID(s) that could not be deleted, else Falsy if failure
        """
        return self._perform_task(
            'delete', ids,
@@ -205,10 +205,10 @@ class DownloadStationAPI(GenericClient):
        """
        Set up and send a method to client
        :param method: Either `resume` or `delete`
-       :param ids: Id(s) to perform method on
+       :param ids: ID(s) to perform method on
        :param filter_func: Call back function to filter tasks as failed or erroneous
        :param pause_first: True if task should be paused prior to invoking method
-       :return: True if success, Id(s) that could not be acted upon, else Falsy if failure
+       :return: True if success, ID(s) that could not be acted upon, else Falsy if failure
        """
        if isinstance(ids, (string_types, list)):
            rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(',')))
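The ids parameter accepts either a list or a comma-separated string; the normalisation on the last line above reduces to this sketch:

def normalise_ids(ids):
    # a list passes through unchanged; a string is split on commas and stripped
    return ids if isinstance(ids, list) else [x.strip() for x in ids.split(',')]

print(normalise_ids('abc, def'))  # ['abc', 'def']
print(normalise_ids(['abc']))     # ['abc']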
@@ -256,7 +256,7 @@ class DownloadStationAPI(GenericClient):
        """
        Add magnet to client (overridden class function)
        :param search_result: A populated search result object
-       :return: Id of task in client, True if added but no ID, else Falsy if nothing added
+       :return: ID of task in client, True if added but no ID, else Falsy if nothing added
        """
        if 3 <= self._task_version:
            return self._add_torrent(uri={'uri': search_result.url})
@@ -269,7 +269,7 @@ class DownloadStationAPI(GenericClient):
        """
        Add file to client (overridden class function)
        :param search_result: A populated search result object
-       :return: Id of task in client, True if added but no ID, else Falsy if nothing added
+       :return: ID of task in client, True if added but no ID, else Falsy if nothing added
        """
        return self._add_torrent(
            files={'file': ('%s.torrent' % re.sub(r'(\.torrent)+$', '', search_result.name), search_result.content)})
@@ -280,7 +280,7 @@ class DownloadStationAPI(GenericClient):
        Create client task
        :param uri: URI param for client API
        :param files: file param for client API
-       :return: Id of task in client, True if created but no id found, else Falsy if nothing created
+       :return: ID of task in client, True if created but no id found, else Falsy if nothing created
        """
        if self._testmode:
            # noinspection PyUnresolvedReferences
@@ -129,7 +129,7 @@ class GenericClient(object):
    def _add_torrent_file(self, result):
        """
        This should be overridden to return the True/False from the client
-       when a torrent is added via result.content (only .torrent file)
+       when a torrent is added via `result.content` (only .torrent file)
        """
        return False
@@ -179,9 +179,9 @@ class GenericClient(object):
        """
        This should be overridden to resume task(s) in client

-       :param ids: Id(s) to act on
+       :param ids: ID(s) to act on
        :type ids: list or string
-       :return: True if success, Id(s) that could not be resumed, else Falsy if failure
+       :return: True if success, ID(s) that could not be resumed, else Falsy if failure
        :rtype: bool or list
        """
        return False
@@ -189,9 +189,9 @@ class GenericClient(object):
    def _delete_torrent(self, ids):
        """
        This should be overridden to delete task(s) from client
-       :param ids: Id(s) to act on
+       :param ids: ID(s) to act on
        :type ids: list or string
-       :return: True if success, Id(s) that could not be deleted, else Falsy if failure
+       :return: True if success, ID(s) that could not be deleted, else Falsy if failure
        :rtype: bool or list
        """
        return False
@@ -200,7 +200,7 @@ class GenericClient(object):
    def _get_torrent_hash(result):

        if result.url.startswith('magnet'):
-           result.hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
+           result.hash = re.findall(r'urn:btih:(\w{32,40})', result.url)[0]
            if 32 == len(result.hash):
                result.hash = make_btih(result.hash).lower()
            else:
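The regex simplification is behaviour-neutral since `[\w]` and `\w` match the same character set; a quick check against a sample magnet URI:

import re

magnet = 'magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=example'
info_hash = re.findall(r'urn:btih:(\w{32,40})', magnet)[0]
print(info_hash)  # 40-char hex hash; 32-char base32 hashes also match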
@@ -147,7 +147,7 @@ class QbittorrentAPI(GenericClient):
        """
        Set maximal priority in queue to torrent task
        :param ids: ID(s) to promote
-       :return: True/Falsy if success/failure else Id(s) that failed to be changed
+       :return: True/Falsy if success/failure else ID(s) that failed to be changed
        """
        def _maxpri_filter(t):
            mark_fail = True
@@ -179,7 +179,7 @@ class QbittorrentAPI(GenericClient):
        """
        Set label/category to torrent task
        :param ids: ID(s) to change
-       :return: True/Falsy if success/failure else Id(s) that failed to be changed
+       :return: True/Falsy if success/failure else ID(s) that failed to be changed
        """
        def _label_filter(t):
            mark_fail = True
@@ -205,8 +205,8 @@ class QbittorrentAPI(GenericClient):
        # type: (Union[AnyStr, list]) -> Union[bool, list]
        """
        Pause item(s)
-       :param ids: Id(s) to pause
-       :return: True/Falsy if success/failure else Id(s) that failed to be paused
+       :param ids: ID(s) to pause
+       :return: True/Falsy if success/failure else ID(s) that failed to be paused
        """
        def _pause_filter(t):
            mark_fail = True
@@ -252,8 +252,8 @@ class QbittorrentAPI(GenericClient):
        # type: (Union[AnyStr, list]) -> Union[bool, list]
        """
        Resume task(s) in client
-       :param ids: Id(s) to act on
-       :return: True if success, Id(s) that could not be resumed, else Falsy if failure
+       :param ids: ID(s) to act on
+       :return: True if success, ID(s) that could not be resumed, else Falsy if failure
        """
        return self._perform_task(
            'resume', ids,
@@ -267,8 +267,8 @@ class QbittorrentAPI(GenericClient):
        # type: (Union[AnyStr, list]) -> Union[bool, list]
        """
        Delete task(s) from client
-       :param ids: Id(s) to act on
-       :return: True if success, Id(s) that could not be deleted, else Falsy if failure
+       :param ids: ID(s) to act on
+       :return: True if success, ID(s) that could not be deleted, else Falsy if failure
        """
        return self._perform_task(
            'delete', ids,
@@ -283,10 +283,10 @@ class QbittorrentAPI(GenericClient):
        """
        Set up and send a method to client
        :param method: Either `resume` or `delete`
-       :param ids: Id(s) to perform method on
+       :param ids: ID(s) to perform method on
        :param filter_func: Call back function passed to _action that will filter tasks as failed or erroneous
        :param pause_first: True if task should be paused prior to invoking method
-       :return: True if success, Id(s) that could not be acted upon, else Falsy if failure
+       :return: True if success, ID(s) that could not be acted upon, else Falsy if failure
        """
        if isinstance(ids, (string_types, list)):
            rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(',')))
@ -395,7 +395,7 @@ class QbittorrentAPI(GenericClient):
|
|||
"""
|
||||
Send a request to client
|
||||
:param cmd: Api task to invoke
|
||||
:param kwargs: keyword arguments to pass thru to helpers getURL function
|
||||
:param kwargs: keyword arguments to pass through to helpers getURL function
|
||||
:return: JSON decoded response dict, True if success and no response body, Text error or None if failure,
|
||||
"""
|
||||
authless = bool(re.search('(?i)login|version', cmd))
|
||||
|
|
|
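`_perform_task` accepts `ids` either as a list or as a comma separated string, as the `rids` line above shows. That normalisation step in isolation, as a small sketch:

```python
def normalise_ids(ids):
    """Mirror of the rids expression: keep lists as-is, split strings on commas."""
    if isinstance(ids, list):
        return ids
    if isinstance(ids, str):
        return [x.strip() for x in ids.split(',')]
    raise TypeError('ids must be a list or a comma separated string')


assert normalise_ids('123, 456') == ['123', '456']
```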
@@ -90,7 +90,7 @@ class RtorrentAPI(GenericClient):
         # try:
         #     if ratio > 0:
         #
-        #         # Explicitly set all group options to ensure it is setup correctly
+        #         # Explicitly set all group options to ensure it is set up correctly
         #         group.set_upload('1M')
         #         group.set_min(ratio)
         #         group.set_max(ratio)
@@ -84,7 +84,7 @@ class TransmissionAPI(GenericClient):

     def _add_torrent(self, t_object):

-        # populate blankable and download_dir
+        # populate blanked and download_dir
         if not self._get_auth():
             logger.log('%s: Authentication failed' % self.name, logger.ERROR)
             return False
@@ -24,17 +24,17 @@ from _23 import urlencode
 from six import iteritems


-class uTorrentAPI(GenericClient):
+class UtorrentAPI(GenericClient):
     def __init__(self, host=None, username=None, password=None):

-        super(uTorrentAPI, self).__init__('uTorrent', host, username, password)
+        super(UtorrentAPI, self).__init__('uTorrent', host, username, password)

         self.url = self.host + 'gui/'

     def _request(self, method='get', params=None, files=None, **kwargs):
         params = {} if None is params else params

-        return super(uTorrentAPI, self)._request(
+        return super(UtorrentAPI, self)._request(
             method=method,
             params='token={0:s}&{1:s}'.format(self.auth, '&'.join(
                 ['%s' % urlencode(dict([[key, str(value)]]))

@@ -128,4 +128,4 @@ class uTorrentAPI(GenericClient):
         return self._request(params=params)


-api = uTorrentAPI()
+api = UtorrentAPI()
@@ -179,7 +179,7 @@ class Quality(object):
         return Quality.qualityStrings[quality].replace('SD DVD', 'SD DVD/BR/BD')

     @staticmethod
-    def _getStatusStrings(status):
+    def _get_status_strings(status):
         """

         :param status: status

@@ -187,14 +187,14 @@ class Quality(object):
         :return:
         :rtype: AnyStr
         """
-        toReturn = {}
+        to_return = {}
         for _x in Quality.qualityStrings:
-            toReturn[Quality.compositeStatus(status, _x)] = '%s (%s)' % (
+            to_return[Quality.composite_status(status, _x)] = '%s (%s)' % (
                 Quality.statusPrefixes[status], Quality.qualityStrings[_x])
-        return toReturn
+        return to_return

     @staticmethod
-    def combineQualities(any_qualities, best_qualities):
+    def combine_qualities(any_qualities, best_qualities):
         # type: (List[int], List[int]) -> int
         """

@@ -210,7 +210,7 @@ class Quality(object):
         return any_quality | (best_quality << 16)

     @staticmethod
-    def splitQuality(quality):
+    def split_quality(quality):
         # type: (int) -> Tuple[List[int], List[int]]
         """

@@ -227,10 +227,10 @@ class Quality(object):
         return sorted(any_qualities), sorted(best_qualities)

     @staticmethod
-    def nameQuality(name, anime=False):
+    def name_quality(name, anime=False):
         """
         Return The quality from an episode File renamed by SickGear
-        If no quality is achieved it will try sceneQuality regex
+        If no quality is achieved it will try scene_quality regex
         :param name: name
         :type name: AnyStr
         :param anime: is anmie

@@ -247,7 +247,7 @@ class Quality(object):
                 continue

             if Quality.NONE == _x:  # Last chance
-                return Quality.sceneQuality(name, anime)
+                return Quality.scene_quality(name, anime)

             regex = r'\W' + Quality.qualityStrings[_x].replace(' ', r'\W') + r'\W'
             regex_match = re.search(regex, name, re.I)

@@ -255,7 +255,7 @@ class Quality(object):
             return _x

     @staticmethod
-    def sceneQuality(name, anime=False):
+    def scene_quality(name, anime=False):
         """
         Return The quality from the scene episode File
         :param name: name

@@ -346,7 +346,7 @@ class Quality(object):
         return Quality.UNKNOWN

     @staticmethod
-    def fileQuality(filename):
+    def file_quality(filename):
         """

         :param filename: filename

@@ -405,7 +405,7 @@ class Quality(object):
         return Quality.UNKNOWN

     @staticmethod
-    def assumeQuality(name):
+    def assume_quality(name):
         """

         :param name: name

@@ -420,7 +420,7 @@ class Quality(object):
         return Quality.UNKNOWN

     @staticmethod
-    def compositeStatus(status, quality):
+    def composite_status(status, quality):
         """

         :param status: status

@@ -433,7 +433,7 @@ class Quality(object):
         return status + 100 * quality

     @staticmethod
-    def qualityDownloaded(status):
+    def quality_downloaded(status):
         # type: (int) -> int
         """

@@ -445,7 +445,7 @@ class Quality(object):
         return (status - DOWNLOADED) // 100

     @staticmethod
-    def splitCompositeStatus(status):
+    def split_composite_status(status):
         # type: (int) -> Tuple[int, int]
         """Returns a tuple containing (status, quality)
         :param status: status

@@ -460,7 +460,7 @@ class Quality(object):
         return status, Quality.NONE

     @staticmethod
-    def statusFromName(name, assume=True, anime=False):
+    def status_from_name(name, assume=True, anime=False):
         """

         :param name: name

@@ -472,13 +472,13 @@ class Quality(object):
         :return:
         :rtype: int or long
         """
-        quality = Quality.nameQuality(name, anime)
+        quality = Quality.name_quality(name, anime)
         if assume and Quality.UNKNOWN == quality:
-            quality = Quality.assumeQuality(name)
-        return Quality.compositeStatus(DOWNLOADED, quality)
+            quality = Quality.assume_quality(name)
+        return Quality.composite_status(DOWNLOADED, quality)

     @staticmethod
-    def statusFromNameOrFile(file_path, assume=True, anime=False):
+    def status_from_name_or_file(file_path, assume=True, anime=False):
         """

         :param file_path: file path

@@ -490,12 +490,12 @@ class Quality(object):
         :return:
         :rtype: int or long
         """
-        quality = Quality.nameQuality(file_path, anime)
+        quality = Quality.name_quality(file_path, anime)
         if Quality.UNKNOWN == quality:
-            quality = Quality.fileQuality(file_path)
+            quality = Quality.file_quality(file_path)
         if assume and Quality.UNKNOWN == quality:
-            quality = Quality.assumeQuality(file_path)
-        return Quality.compositeStatus(DOWNLOADED, quality)
+            quality = Quality.assume_quality(file_path)
+        return Quality.composite_status(DOWNLOADED, quality)

     SNATCHED = None
     SNATCHED_PROPER = None

@@ -515,7 +515,7 @@ class WantedQualities(dict):
         super(WantedQualities, self).__init__(**kwargs)

     def _generate_wantedlist(self, qualities):
-        initial_qualities, upgrade_qualities = Quality.splitQuality(qualities)
+        initial_qualities, upgrade_qualities = Quality.split_quality(qualities)
         max_initial_quality = max(initial_qualities or [Quality.NONE])
         min_upgrade_quality = min(upgrade_qualities or [1 << 16])
         self[qualities] = {0: {self.bothlists: False, self.wantedlist: initial_qualities, self.upgradelist: False}}

@@ -562,23 +562,23 @@ for (attr_name, qual_val) in [
     ('SNATCHED', SNATCHED), ('SNATCHED_PROPER', SNATCHED_PROPER), ('SNATCHED_BEST', SNATCHED_BEST),
     ('DOWNLOADED', DOWNLOADED), ('ARCHIVED', ARCHIVED), ('FAILED', FAILED),
 ]:
-    setattr(Quality, attr_name, list(map(lambda qk: Quality.compositeStatus(qual_val, qk),
+    setattr(Quality, attr_name, list(map(lambda qk: Quality.composite_status(qual_val, qk),
                                          iterkeys(Quality.qualityStrings))))
 Quality.SNATCHED_ANY = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST

-SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], [])
-HD = Quality.combineQualities(
+SD = Quality.combine_qualities([Quality.SDTV, Quality.SDDVD], [])
+HD = Quality.combine_qualities(
     [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY],
     [])  # HD720p + HD1080p
-HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
-HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
-UHD2160p = Quality.combineQualities([Quality.UHD4KWEB], [])
-ANY = Quality.combineQualities(
+HD720p = Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
+HD1080p = Quality.combine_qualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
+UHD2160p = Quality.combine_qualities([Quality.UHD4KWEB], [])
+ANY = Quality.combine_qualities(
     [Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
      Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], [])  # SD + HD

 # legacy template, can't remove due to reference in mainDB upgrade?
-BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
+BEST = Quality.combine_qualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])

 qualityPresets = (SD, HD, HD720p, HD1080p, UHD2160p, ANY)

@@ -607,7 +607,7 @@ class StatusStrings(object):

     def __getitem__(self, name):
         if name in Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.ARCHIVED:
-            status, quality = Quality.splitCompositeStatus(name)
+            status, quality = Quality.split_composite_status(name)
             if quality == Quality.NONE:
                 return self.statusStrings[status]
             return '%s (%s)' % (self.statusStrings[status], Quality.qualityStrings[quality])

@@ -703,7 +703,7 @@ class NeededQualities(object):
         """
         from sickgear.tv import TVShow
         if isinstance(show_obj, TVShow):
-            init, upgrade = Quality.splitQuality(show_obj.quality)
+            init, upgrade = Quality.split_quality(show_obj.quality)
             all_qual = set(init + upgrade)
             need_sd = need_hd = need_uhd = need_webdl = False
             for wanted_qualities in all_qual:
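Two encodings recur through the renamed Quality helpers above: a 32-bit mask that packs the "any" and "best" quality flag sets into the low and high 16 bits, and a composite episode status of `status + 100 * quality`. A minimal sketch of both, using illustrative flag values rather than SickGear's actual constants:

```python
# Illustrative assumptions, not SickGear's exact values
SDTV, SDDVD, HDTV = 1, 1 << 1, 1 << 2  # quality bit flags
DOWNLOADED = 4                          # a plain status code


def combine_qualities(any_qualities, best_qualities):
    # pack two flag sets into one int: low 16 bits "any", high 16 bits "best"
    any_flags = sum(any_qualities)   # flags are disjoint bits, so sum == OR
    best_flags = sum(best_qualities)
    return any_flags | (best_flags << 16)


def split_quality(quality):
    any_qualities = [q for q in (SDTV, SDDVD, HDTV) if quality & q]
    best_qualities = [q for q in (SDTV, SDDVD, HDTV) if (quality >> 16) & q]
    return sorted(any_qualities), sorted(best_qualities)


def composite_status(status, quality):
    # status lives in the low two digits; quality is multiplied out of the way
    return status + 100 * quality


def split_composite_status(status):
    return status % 100, status // 100


assert split_quality(combine_qualities([SDTV, SDDVD], [HDTV])) == ([SDTV, SDDVD], [HDTV])
assert split_composite_status(composite_status(DOWNLOADED, HDTV)) == (DOWNLOADED, HDTV)
```

The real `split_composite_status` walks the known quality flags rather than dividing, but the arithmetic above is the underlying idea.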
@@ -152,7 +152,7 @@ def schedule_mediaprocess(iv):
     if sickgear.MEDIAPROCESS_INTERVAL < sickgear.MIN_MEDIAPROCESS_INTERVAL:
         sickgear.MEDIAPROCESS_INTERVAL = sickgear.MIN_MEDIAPROCESS_INTERVAL

-    sickgear.media_process_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL)
+    sickgear.media_process_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL)
     sickgear.media_process_scheduler.set_paused_state()


@@ -162,14 +162,14 @@ def schedule_recentsearch(iv):
     if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL:
         sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL

-    sickgear.recent_search_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL)
+    sickgear.recent_search_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL)


 def schedule_backlog(iv):
     sickgear.BACKLOG_PERIOD = minimax(iv, sickgear.DEFAULT_BACKLOG_PERIOD,
-                                      sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD)
+                                          sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD)

-    sickgear.backlog_search_scheduler.action.cycleTime = sickgear.BACKLOG_PERIOD
+    sickgear.backlog_search_scheduler.action.cycle_time = sickgear.BACKLOG_PERIOD


 def schedule_update_software(iv):

@@ -178,7 +178,7 @@ def schedule_update_software(iv):
     if sickgear.UPDATE_INTERVAL < sickgear.MIN_UPDATE_INTERVAL:
         sickgear.UPDATE_INTERVAL = sickgear.MIN_UPDATE_INTERVAL

-    sickgear.update_software_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL)
+    sickgear.update_software_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL)


 def schedule_update_software_notify(update_notify):

@@ -195,10 +195,10 @@ def schedule_update_software_notify(update_notify):

 def schedule_update_packages(iv):
     sickgear.UPDATE_PACKAGES_INTERVAL = minimax(iv, sickgear.DEFAULT_UPDATE_PACKAGES_INTERVAL,
-                                                sickgear.MIN_UPDATE_PACKAGES_INTERVAL,
-                                                sickgear.MAX_UPDATE_PACKAGES_INTERVAL)
+                                                    sickgear.MIN_UPDATE_PACKAGES_INTERVAL,
+                                                    sickgear.MAX_UPDATE_PACKAGES_INTERVAL)

-    sickgear.update_packages_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL)
+    sickgear.update_packages_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL)


 def schedule_update_packages_notify(update_packages_notify):

@@ -228,15 +228,6 @@ def schedule_trakt(use_trakt):
         return

     sickgear.USE_TRAKT = use_trakt
-    # if sickgear.USE_TRAKT:
-    #     sickgear.trakt_checker_scheduler.start()
-    # else:
-    #     sickgear.trakt_checker_scheduler.stop()
-    #     logger.log(u'Waiting for the TRAKTCHECKER thread to exit')
-    #     try:
-    #         sickgear.trakt_checker_scheduler.join(10)
-    #     except:
-    #         pass


 def schedule_subtitles(use_subtitles):

@@ -250,7 +241,7 @@ def schedule_emby_watched(emby_watched_interval):
                                   0, sickgear.MAX_WATCHEDSTATE_INTERVAL)
     if emby_watched_iv and emby_watched_iv != sickgear.EMBY_WATCHEDSTATE_INTERVAL:
         sickgear.EMBY_WATCHEDSTATE_INTERVAL = emby_watched_iv
-        sickgear.emby_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=emby_watched_iv)
+        sickgear.emby_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=emby_watched_iv)

     sickgear.EMBY_WATCHEDSTATE_SCHEDULED = bool(emby_watched_iv)
     sickgear.emby_watched_state_scheduler.set_paused_state()

@@ -261,7 +252,7 @@ def schedule_plex_watched(plex_watched_interval):
                                   0, sickgear.MAX_WATCHEDSTATE_INTERVAL)
     if plex_watched_iv and plex_watched_iv != sickgear.PLEX_WATCHEDSTATE_INTERVAL:
         sickgear.PLEX_WATCHEDSTATE_INTERVAL = plex_watched_iv
-        sickgear.plex_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=plex_watched_iv)
+        sickgear.plex_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=plex_watched_iv)

     sickgear.PLEX_WATCHEDSTATE_SCHEDULED = bool(plex_watched_iv)
     sickgear.plex_watched_state_scheduler.set_paused_state()

@@ -345,7 +336,7 @@ def clean_hosts(hosts, default_port=None, allow_base=False):


 def clean_url(url, add_slash=True):
-    """ Returns an cleaned url starting with a scheme and folder with trailing '/' or an empty string """
+    """ Returns a cleaned url starting with a scheme and folder with trailing '/' or an empty string """

     if url and url.strip():


@@ -437,7 +428,7 @@ def check_setting_float(config, cfg_name, item_name, def_val):

 def check_setting_str(config, cfg_name, item_name, def_val, log=True):
     """
-    For passwords you must include the word `password` in the item_name and
+    For passwords, you must include the word `password` in the item_name and
     add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
     """


@@ -662,7 +653,7 @@ class ConfigMigrator(object):
         Reads in the old naming settings from your config and generates a new config template from them.
         """
         # get the old settings from the file and store them in the new variable names
-        for prov in [curProvider for curProvider in sickgear.providers.sortedProviderList()
+        for prov in [curProvider for curProvider in sickgear.providers.sorted_sources()
                      if 'omgwtfnzbs' == curProvider.name]:
             prov.username = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '')
             prov.api_key = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '')

@@ -773,13 +764,13 @@ class ConfigMigrator(object):
     # Migration v6: Rename daily search to recent search
     def _migrate_v6(self):
         sickgear.RECENTSEARCH_INTERVAL = check_setting_int(self.config_obj, 'General', 'dailysearch_frequency',
-                                                           sickgear.DEFAULT_RECENTSEARCH_INTERVAL)
+                                                               sickgear.DEFAULT_RECENTSEARCH_INTERVAL)

         sickgear.RECENTSEARCH_STARTUP = bool(check_setting_int(self.config_obj, 'General', 'dailysearch_startup', 1))
         if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL:
             sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL

-        for curProvider in sickgear.providers.sortedProviderList():
+        for curProvider in sickgear.providers.sorted_sources():
             if hasattr(curProvider, 'enable_recentsearch'):
                 curProvider.enable_recentsearch = bool(check_setting_int(
                     self.config_obj, curProvider.get_id().upper(), curProvider.get_id() + '_enable_dailysearch', 1))

@@ -831,7 +822,7 @@ class ConfigMigrator(object):
     # Migration v15: Transmithe.net variables
     def _migrate_v15(self):
         try:
-            neb = list(filter(lambda p: 'Nebulance' in p.name, sickgear.providers.sortedProviderList()))[0]
+            neb = list(filter(lambda p: 'Nebulance' in p.name, sickgear.providers.sorted_sources()))[0]
         except (BaseException, Exception):
             return
         # get the old settings from the file and store them in the new variable names
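The `schedule_*` functions above clamp user supplied intervals with `minimax(value, default, min, max)`. A sketch of the behaviour that signature implies (the real helper lives elsewhere in the codebase and may differ in detail):

```python
def minimax(value, default, low, high):
    """Clamp value into [low, high]; fall back to default when value is unusable."""
    try:
        value = int(value)
    except (TypeError, ValueError):
        return default
    return max(low, min(value, high))


assert minimax(999, 60, 10, 360) == 360
assert minimax('x', 60, 10, 360) == 60
```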
@@ -474,7 +474,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
                 continue

             # get the status/quality of the existing ep and make sure it's what we expect
-            ep_status, ep_quality = common.Quality.splitCompositeStatus(int(sql_result[0]['status']))
+            ep_status, ep_quality = common.Quality.split_composite_status(int(sql_result[0]['status']))
             if ep_status != common.DOWNLOADED:
                 continue


@@ -581,8 +581,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
     """

     def _update_status(self, old_status):
-        (status, quality) = common.Quality.splitCompositeStatus(old_status)
-        return common.Quality.compositeStatus(status, self._update_quality(quality))
+        (status, quality) = common.Quality.split_composite_status(old_status)
+        return common.Quality.composite_status(status, self._update_quality(quality))

     @staticmethod
     def _update_quality(old_quality):

@@ -635,17 +635,17 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
         sickgear.save_config()

         # upgrade previous HD to HD720p -- shift previous qualities to new placevalues
-        old_hd = common.Quality.combineQualities(
+        old_hd = common.Quality.combine_qualities(
             [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
-        new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL,
-                                                  common.Quality.HDBLURAY], [])
+        new_hd = common.Quality.combine_qualities([common.Quality.HDTV, common.Quality.HDWEBDL,
+                                                       common.Quality.HDBLURAY], [])

         # update ANY -- shift existing qualities and add new 1080p qualities,
         # note that rawHD was not added to the ANY template
-        old_any = common.Quality.combineQualities(
+        old_any = common.Quality.combine_qualities(
             [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
              common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
-        new_any = common.Quality.combineQualities(
+        new_any = common.Quality.combine_qualities(
             [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
              common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY,
              common.Quality.UNKNOWN], [])
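The `Add1080pAndRawHDQualities` hunk rebuilds quality masks because new flags were inserted between existing ones: `HDWEBDL >> 2` and `HDBLURAY >> 3` reconstruct the old place values of flags that moved up. A toy illustration of remapping a mask when one flag shifts, with made-up flag values:

```python
OLD_HDWEBDL = 1 << 2  # old place value (illustrative)
NEW_HDWEBDL = 1 << 4  # same flag after two new qualities were inserted below it


def migrate_mask(old_mask):
    """Move the HDWEBDL bit to its new position; leave all other bits untouched."""
    new_mask = old_mask & ~OLD_HDWEBDL
    if old_mask & OLD_HDWEBDL:
        new_mask |= NEW_HDWEBDL
    return new_mask


assert migrate_mask(OLD_HDWEBDL | 1) == NEW_HDWEBDL | 1
```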
@@ -33,7 +33,7 @@ class Events(threading.Thread):
             # get event type
             etype = self.queue.get(True, 1)

-            # perform callback if we got a event type
+            # perform callback if we got an event type
             self.callback(etype)

             # event completed
@@ -69,19 +69,19 @@ class FailedProcessor(LegacyFailedProcessor):
         """
         self._log(u'Failed download detected: (%s, %s)' % (self.nzb_name, self.dir_name))

-        releaseName = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
-        if None is releaseName:
+        release_name = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
+        if None is release_name:
             self._log(u'Warning: unable to find a valid release name.', logger.WARNING)
             raise exceptions_helper.FailedProcessingFailed()

         try:
             parser = NameParser(False, show_obj=self.show_obj, convert=True)
-            parsed = parser.parse(releaseName)
+            parsed = parser.parse(release_name)
         except InvalidNameException:
-            self._log(u'Error: release name is invalid: ' + releaseName, logger.DEBUG)
+            self._log(u'Error: release name is invalid: ' + release_name, logger.DEBUG)
             raise exceptions_helper.FailedProcessingFailed()
         except InvalidShowException:
-            self._log(u'Error: unable to parse release name %s into a valid show' % releaseName, logger.DEBUG)
+            self._log(u'Error: unable to parse release name %s into a valid show' % release_name, logger.DEBUG)
             raise exceptions_helper.FailedProcessingFailed()

         logger.log(u"name_parser info: ", logger.DEBUG)
@@ -160,8 +160,8 @@ def set_episode_failed(ep_obj):
     """
     try:
         with ep_obj.lock:
-            quality = Quality.splitCompositeStatus(ep_obj.status)[1]
-            ep_obj.status = Quality.compositeStatus(FAILED, quality)
+            quality = Quality.split_composite_status(ep_obj.status)[1]
+            ep_obj.status = Quality.composite_status(FAILED, quality)
             ep_obj.save_to_db()

     except EpisodeNotFoundException as e:

@@ -231,7 +231,7 @@ def revert_episode(ep_obj):
         if ep_obj.episode in history_eps:
             status_revert = history_eps[ep_obj.episode]['old_status']

-            status, quality = Quality.splitCompositeStatus(status_revert)
+            status, quality = Quality.split_composite_status(status_revert)
             logger.log('Found in failed.db history with status: %s quality: %s' % (
                 statusStrings[status], Quality.qualityStrings[quality]))
         else:
@@ -175,7 +175,7 @@ class GenericQueue(object):
         """
         clear queue excluding internal defined types

-        :param action_types: only clear all of given action type
+        :param action_types: only clear supplied action types
         """
         if not isinstance(action_types, list):
             action_types = [action_types]
@@ -23,7 +23,7 @@ if False:

 class GitHub(object):
     """
-    Simple api wrapper for the Github API v3. Currently only supports the small thing that SB
+    Simple api wrapper for the GitHub API v3. Currently only supports the small thing that SB
     needs it for - list of commits.
     """

@@ -34,7 +34,7 @@ class GitHub(object):
         self.branch = branch

     @staticmethod
-    def _access_API(path, params=None):
+    def _access_api(path, params=None):
         """
         Access the API at the path given and with the optional params given.

@@ -49,55 +49,57 @@ class GitHub(object):
         if params and type(params) is dict:
             url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params])

-        parsedJSON = helpers.get_url(url, parse_json=True)
-        if not parsedJSON:
+        parsed_json = helpers.get_url(url, parse_json=True)
+        if not parsed_json:
             return []

-        return parsedJSON
+        return parsed_json

     def commits(self):
         """
         Get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD.

-        user: The github username of the person whose repo you're querying
+        user: The GitHub username of the person whose repo you're querying
         repo: The repo name to query
         branch: Optional, the branch name to show commits from

-        Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
+        Returns a deserialized json object containing the commit info.
+        See https://developer.github.com/v3/repos/commits/
         """
-        access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'],
+        access_api = self._access_api(['repos', self.github_repo_user, self.github_repo, 'commits'],
                                       params={'per_page': 100, 'sha': self.branch})
-        return access_API
+        return access_api

     def compare(self, base, head, per_page=1):
         """
         Uses the API to get a list of compares between base and head.

-        user: The github username of the person whose repo you're querying
+        user: The GitHub username of the person whose repo you're querying
         repo: The repo name to query
         base: Start compare from branch
         head: Current commit sha or branch name to compare
         per_page: number of items per page

-        Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits
+        Returns a deserialized json object containing the compare info.
+        See https://developer.github.com/v3/repos/commits
         """
-        access_API = self._access_API(
+        access_api = self._access_api(
             ['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head],
             params={'per_page': per_page})
-        return access_API
+        return access_api

     def branches(self):
-        access_API = self._access_API(
+        access_api = self._access_api(
             ['repos', self.github_repo_user, self.github_repo, 'branches'],
             params={'per_page': 100})
-        return access_API
+        return access_api

     def pull_requests(self):
-        access_API = self._access_API(
+        access_api = self._access_api(
             ['repos', self.github_repo_user, self.github_repo, 'pulls'],
             params={'per_page': 100})  # type: Optional[Dict]
         pulls = []
-        for x in access_API:
+        for x in access_api:
             try:
                 pull = PullRequest(x['head']['ref'], x['number'])
                 pulls.append((repr(pull), pull.fetch_name()))
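The renamed `_access_api` builds a URL from path segments plus an optional query dict. A sketch of the same request shape with stdlib plumbing in place of `helpers.get_url` (the API root shown is an assumption; the real base url is set elsewhere in the class):

```python
import json
from urllib.parse import urlencode
from urllib.request import urlopen


def access_api(path, params=None):
    """Join path segments under the GitHub API root and fetch the JSON body."""
    url = 'https://api.github.com/' + '/'.join(path)
    if params and isinstance(params, dict):
        url += '?' + urlencode(params)
    with urlopen(url) as resp:  # helpers.get_url(..., parse_json=True) in the real code
        return json.load(resp)


# e.g. access_api(['repos', 'SickGear', 'SickGear', 'commits'], {'per_page': 100})
```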
@@ -63,7 +63,7 @@ if False:
     from typing import Any, AnyStr, Dict, Generator, NoReturn, Iterable, Iterator, List, Optional, Set, Tuple, Union
     from .tv import TVShow
     # the following workaround hack resolves a pyc resolution bug
-    from .name_cache import retrieveNameFromCache
+    from .name_cache import retrieve_name_from_cache
     from six import integer_types

 RE_XML_ENCODING = re.compile(r'^(<\?xml[^>]+)\s+(encoding\s*=\s*[\"\'][^\"\']*[\"\'])(\s*\?>|)', re.U)

@@ -954,7 +954,7 @@ def get_show(name, try_scene_exceptions=False):
     show_obj = None

     try:
-        tvid, prodid = sickgear.name_cache.retrieveNameFromCache(name)
+        tvid, prodid = sickgear.name_cache.retrieve_name_from_cache(name)
         if tvid and prodid:
             show_obj = find_show_by_id({tvid: prodid})


@@ -1284,7 +1284,7 @@ def check_port(host, port, timeout=1.0):


 def clear_unused_providers():
-    providers = [x.cache.providerID for x in sickgear.providers.sortedProviderList() if x.is_active()]
+    providers = [x.cache.providerID for x in sickgear.providers.sorted_sources() if x.is_active()]

     if providers:
         my_db = db.DBConnection('cache.db')

@@ -1391,7 +1391,7 @@ def should_delete_episode(status):
     :return: should be deleted
     :rtype: bool
     """
-    s = Quality.splitCompositeStatus(status)[0]
+    s = Quality.split_composite_status(status)[0]
     if s not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED]:
         return True
     logger.log('not safe to delete episode from db because of status: %s' % statusStrings[s], logger.DEBUG)

@@ -1515,7 +1515,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False):
     :type split_snatch: bool
     :return: constant from classes Overview
     """
-    status, quality = Quality.splitCompositeStatus(ep_status)
+    status, quality = Quality.split_composite_status(ep_status)
     if ARCHIVED == status:
         return Overview.GOOD
     if WANTED == status:

@@ -1531,7 +1531,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False):
     if not split_snatch and status in SNATCHED_ANY:
         return Overview.SNATCHED

-    void, best_qualities = Quality.splitQuality(show_quality)
+    void, best_qualities = Quality.split_quality(show_quality)
     # if re-downloads aren't wanted then mark it "good" if there is anything
     if not len(best_qualities):
         return Overview.GOOD
@@ -72,7 +72,7 @@ def log_snatch(search_result):
     else:
         provider = 'unknown'

-    action = Quality.compositeStatus((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality)
+    action = Quality.composite_status((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality)

     resource = search_result.name


@@ -120,8 +120,8 @@ def log_subtitle(tvid, prodid, season, episode, status, subtitle_result):
     """
     resource = subtitle_result.path
     provider = subtitle_result.service
-    status, quality = Quality.splitCompositeStatus(status)
-    action = Quality.compositeStatus(SUBTITLED, quality)
+    status, quality = Quality.split_composite_status(status)
+    action = Quality.composite_status(SUBTITLED, quality)

     _log_history_item(action, tvid, prodid, season, episode, quality, resource, provider)


@@ -135,8 +135,8 @@ def log_failed(ep_obj, release, provider=None):
     :param release: release
     :param provider: provider name
     """
-    status, quality = Quality.splitCompositeStatus(ep_obj.status)
-    action = Quality.compositeStatus(FAILED, quality)
+    status, quality = Quality.split_composite_status(ep_obj.status)
+    action = Quality.composite_status(FAILED, quality)

     _log_history_item(action, ep_obj.show_obj.tvid, ep_obj.show_obj.prodid,
                       ep_obj.season, ep_obj.episode, quality, release, provider)

@@ -210,7 +210,7 @@ def history_snatched_proper_fix():
                 continue
             if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
                 cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
-                           [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])),
+                           [Quality.composite_status(SNATCHED_PROPER, int(r['quality'])),
                             r['rowid']]])
     if cl:
         my_db.mass_action(cl)
@@ -271,7 +271,7 @@ class ImageCache(object):
         """
         :param image_file: image file
         :type image_file: AnyStr
-        :return: true if a image_file exists
+        :return: true if an image_file exists
         :rtype: bool
         """
         result = []

@@ -652,7 +652,7 @@ class ImageCache(object):
                 if thumb_img_data:
                     thumb_result = metadata_generator.write_image(thumb_img_data, dest_thumb_path, force=True)
                 if not thumb_result:
-                    thumb_result = metadata_generator.write_image(img_data, dest_thumb_path, force=True)
+                    metadata_generator.write_image(img_data, dest_thumb_path, force=True)
                 break

         if result:
@@ -132,7 +132,7 @@ def confirm_show(premiere_date, shows_premiere, expected_name, show_name):
     # type: (Optional[datetime.date], Optional[Union[AnyStr, datetime.date]], AnyStr, AnyStr) -> bool
     """
     confirm show possible confirmations:
-    1. premiere dates are less then 2 days apart
+    1. premiere dates are less than 2 days apart
     2. show name is the same and premiere year is 1 year or less apart

     :param premiere_date: expected show premiere date

@@ -252,7 +252,7 @@ def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_
     all_ids_srcs = [src_tv_id] + [s for s in (TVINFO_TRAKT, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB, TVINFO_IMDB)
                                   if s != src_tv_id]
     searched, confirmed = {}, False
-    for r in moves.range(len(all_ids_srcs)):
+    for _ in moves.range(len(all_ids_srcs)):
         search_done = False
         for i in all_ids_srcs:
             if new_ids.verified.get(i):
@@ -263,8 +263,8 @@ class SBRotatingLogHandler(object):
                 buf = fh.read(min(remaining_size, buf_size))
                 remaining_size -= buf_size
                 lines = buf.split('\n')
-                # the first line of the buffer is probably not a complete line so
-                # we'll save it and append it to the last line of the next buffer
+                # the first line of the buffer is probably not a complete line,
+                # so save it and append it to the last line of the next buffer
                 # we read
                 if None is not segment:
                     # if the previous chunk starts right from the beginning of line
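The log handler hunk touches the classic tail-read pattern: scan a file backwards in fixed-size blocks and carry each block's first, possibly partial, line into the next iteration. A self-contained sketch of that approach, assuming utf-8 content (multibyte splits across block edges are glossed over here with a lossy decode):

```python
def tail_lines(path, max_lines, buf_size=64 * 1024):
    """Return roughly the last max_lines of a text file without reading it all."""
    lines, segment = [], None
    with open(path, 'rb') as fh:
        fh.seek(0, 2)
        remaining = fh.tell()
        while remaining > 0 and len(lines) <= max_lines:
            offset = max(0, remaining - buf_size)
            fh.seek(offset)
            buf = fh.read(remaining - offset).decode('utf-8', 'replace')
            remaining = offset
            chunk = buf.split('\n')
            if segment is not None:
                chunk[-1] += segment  # glue the carried partial line back on
            segment = chunk[0]        # first line may be incomplete; carry it
            lines[0:0] = chunk[1:]    # prepend the complete lines
    if segment is not None:
        lines.insert(0, segment)
    return lines[-max_lines:]
```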
@@ -25,7 +25,7 @@ def available_generators():
     return list(filter(lambda x: x not in ('generic', 'helpers'), __all__))


-def _getMetadataModule(name):
+def _get_metadata_module(name):
     name = name.lower()
     prefix = "sickgear.metadata."
     if name in __all__ and prefix + name in sys.modules:

@@ -33,8 +33,8 @@ def _getMetadataModule(name):
     return None


-def _getMetadataClass(name):
-    module = _getMetadataModule(name)
+def _get_metadata_class(name):
+    module = _get_metadata_module(name)

     if not module:
         return None

@@ -45,10 +45,10 @@ def _getMetadataClass(name):
 def get_metadata_generator_dict():
     result = {}
     for cur_generator_id in available_generators():
-        cur_generator = _getMetadataClass(cur_generator_id)
+        cur_generator = _get_metadata_class(cur_generator_id)
         if not cur_generator:
             continue
         result[cur_generator.name] = cur_generator

     return result
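`_get_metadata_module` resolves a generator module by name under a fixed package prefix and already-imported `sys.modules` entries. A hedged sketch of the same name-to-class lookup using importlib instead; the `metadata_class` attribute each module is assumed to expose follows the SickGear convention:

```python
import importlib


def get_metadata_class(name, package='sickgear.metadata'):
    """Resolve '<package>.<name>' and hand back its metadata class, else None."""
    try:
        module = importlib.import_module('%s.%s' % (package, name.lower()))
    except ImportError:
        return None
    return getattr(module, 'metadata_class', None)
```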
@@ -613,7 +613,7 @@ class GenericMetadata(object):
             logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG)
             return False

-        thumb_data = metadata_helpers.getShowImage(thumb_url, show_name=ep_obj.show_obj.name)
+        thumb_data = metadata_helpers.get_show_image(thumb_url, show_name=ep_obj.show_obj.name)

         result = self._write_image(thumb_data, file_path)


@@ -711,7 +711,7 @@ class GenericMetadata(object):
             if 0 == len(cur_season_art):
                 continue

-            # Just grab whatever's there for now
+            # Just grab whatever is there for now
             art_id, season_url = cur_season_art.popitem()

             season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)

@@ -721,7 +721,7 @@ class GenericMetadata(object):
                           logger.DEBUG)
                 continue

-            season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
+            season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)

             if not season_data:
                 logger.log(u'No season poster data available, skipping this season', logger.DEBUG)

@@ -756,7 +756,7 @@ class GenericMetadata(object):
             if 0 == len(cur_season_art):
                 continue

-            # Just grab whatever's there for now
+            # Just grab whatever is there for now
             art_id, season_url = cur_season_art.popitem()

             season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)

@@ -766,7 +766,7 @@ class GenericMetadata(object):
                           logger.DEBUG)
                 continue

-            season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
+            season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)

             if not season_data:
                 logger.log(u'No season banner data available, skipping this season', logger.DEBUG)

@@ -854,7 +854,7 @@ class GenericMetadata(object):
         def _get_show_info(tv_id):
             try:
                 show_lang = show_obj.lang
-                # There's gotta be a better way of doing this but we don't wanna
+                # There's gotta be a better way of doing this, but we don't want to
                 # change the language value elsewhere
                 tvinfo_config = sickgear.TVInfoAPI(tv_id).api_params.copy()
                 tvinfo_config['fanart'] = True

@@ -1058,7 +1058,7 @@ class GenericMetadata(object):
             if image_type in ('poster', 'banner'):
                 if isinstance(image_url, tuple):
                     image_url = image_url[0]
-                img_data = metadata_helpers.getShowImage(image_url, which, show_obj.name)
+                img_data = metadata_helpers.get_show_image(image_url, which, show_obj.name)
                 if img_cache_type and img_cache_type != image_cache.which_type(img_data, is_binary=True):
                     img_data = None
                     continue

@@ -1082,7 +1082,7 @@ class GenericMetadata(object):
         result = {}

         try:
-            # There's gotta be a better way of doing this but we don't wanna
+            # There's gotta be a better way of doing this, but we don't want to
             # change the language value elsewhere
             tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy()
             tvinfo_config[image_type] = True
@@ -22,7 +22,7 @@ if False:
    from typing import AnyStr, Optional


-def getShowImage(url, img_num=None, show_name=None, supress_log=False):
+def get_show_image(url, img_num=None, show_name=None, supress_log=False):
     # type: (AnyStr, Optional[int], Optional[AnyStr], bool) -> Optional[bytes]
     """

@@ -107,7 +107,7 @@ class KODIMetadata(generic.GenericMetadata):
         show_obj: a TVShow instance to create the NFO for
         """

-        show_ID = show_obj.prodid
+        show_id = show_obj.prodid

         show_lang = show_obj.lang
         tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy()

@@ -125,9 +125,9 @@ class KODIMetadata(generic.GenericMetadata):
         tv_node = etree.Element('tvshow')

         try:
-            show_info = t[int(show_ID)]
+            show_info = t[int(show_id)]
         except BaseTVinfoShownotfound as e:
-            logger.log('Unable to find show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
+            logger.log('Unable to find show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
                 show_obj.tvid).name), logger.ERROR)
             raise e
         except BaseTVinfoError as e:

@@ -141,7 +141,7 @@ class KODIMetadata(generic.GenericMetadata):

         # check for title and id
         if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None):
-            logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
+            logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
                 show_obj.tvid).name), logger.ERROR)
             return False


@@ -171,7 +171,7 @@ class KODIMetadata(generic.GenericMetadata):
             uniqueid = etree.SubElement(tv_node, 'uniqueid', **kwargs)
             uniqueid.text = '%s%s' % (('', 'tt')[TVINFO_IMDB == tvid], mid)
         if not has_id:
-            logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
+            logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
                 show_obj.tvid).name), logger.ERROR)
             return False

@@ -32,7 +32,7 @@ sceneNameCache = {}
 nameCacheLock = threading.Lock()


-def addNameToCache(name, tvid=0, prodid=0, season=-1):
+def add_name_to_cache(name, tvid=0, prodid=0, season=-1):
     """Adds the show & tvdb id to the namecache

     :param name: the show name to cache

@@ -41,7 +41,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1):
     :type tvid: int
     :param prodid: the production id that this show should be cached with (can be None/0 for unknown)
     :type prodid: int or long
-    :param season: the season the the name exception belongs to. -1 for generic exception
+    :param season: the season the name exception belongs to. -1 for generic exception
     :type season: int
     """
     global nameCache

@@ -53,7 +53,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1):
         nameCache[name] = [int(tvid), int(prodid), season]


-def retrieveNameFromCache(name):
+def retrieve_name_from_cache(name):
     # type: (AnyStr) -> Union[Tuple[int, int], Tuple[None, None]]
     """Looks up the given name in the name cache


@@ -71,7 +71,7 @@ def retrieveNameFromCache(name):
     return None, None


-def buildNameCache(show_obj=None, update_only_scene=False):
+def build_name_cache(show_obj=None, update_only_scene=False):
     # type: (Optional[Union[TVShow, TVShowBase]], bool) -> None
     """Adds all new name exceptions to the namecache memory and flushes any removed name exceptions


@@ -104,7 +104,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
                           for cur_so in sickgear.showList if cur_so])
         sceneNameCache = {}

-    cacheDB = db.DBConnection()
+    cache_db = db.DBConnection()

     cache_results = []
     if update_only_scene:

@@ -117,7 +117,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
         tmp_scene_name_cache = sceneNameCache.copy()

         for t, s in iteritems(show_ids):
-            cache_results += cacheDB.select(
+            cache_results += cache_db.select(
                 'SELECT show_name, indexer AS tv_id, indexer_id AS prod_id, season'
                 ' FROM scene_exceptions'
                 ' WHERE indexer = %s AND indexer_id IN (%s)' % (t, ','.join(['%s' % i for i in s])))
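The name cache renamed above is a lock-guarded dict from show name to `[tvid, prodid, season]`. A minimal sketch of the reader/writer pair; the lower-casing stands in for the fuller name sanitising the real module performs:

```python
import threading

name_cache = {}
name_cache_lock = threading.Lock()


def add_name_to_cache(name, tvid=0, prodid=0, season=-1):
    """Map a show name to its ids, mirroring add_name_to_cache's shape."""
    with name_cache_lock:
        name_cache[name.lower()] = [int(tvid), int(prodid), season]


def retrieve_name_from_cache(name):
    """Return (tvid, prodid) for a cached name, or (None, None) when unknown."""
    with name_cache_lock:
        entry = name_cache.get(name.lower())
    return tuple(entry[:2]) if entry else (None, None)


add_name_to_cache('Show Name', tvid=1, prodid=12345)
assert retrieve_name_from_cache('show name') == (1, 12345)
```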
@@ -260,7 +260,7 @@ class NameParser(object):
                 if 'extra_info' in named_groups:
                     tmp_extra_info = match.group('extra_info')

-                    # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
+                    # Show.S04.Special or Show.S05.Part.2.Extras are almost certainly not every episode in the season
                     if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
                             r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
                         continue

@@ -292,7 +292,7 @@ class NameParser(object):
                 matches.append(result)

         if len(matches):
-            # pick best match with highest score based on placement
+            # pick best match with the highest score based on placement
             best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)

             show_obj = None

@@ -326,7 +326,7 @@ class NameParser(object):

             # get quality
             new_name = helpers.remove_non_release_groups(name, show_obj.is_anime)
-            best_result.quality = common.Quality.nameQuality(new_name, show_obj.is_anime)
+            best_result.quality = common.Quality.name_quality(new_name, show_obj.is_anime)

             new_episode_numbers = []
             new_season_numbers = []

@@ -451,7 +451,7 @@ class NameParser(object):
                                  'SickGear does not support this.  '
                                  'Sorry.' % (str(new_season_numbers)))

-            # I guess it's possible that we'd have duplicate episodes too, so lets
+            # I guess it's possible that we'd have duplicate episodes too, so let's
             # eliminate them
             new_episode_numbers = list(set(new_episode_numbers))
             new_episode_numbers.sort()

@@ -500,20 +500,20 @@ class NameParser(object):
         if not second:
             return getattr(first, attr)

-        a = getattr(first, attr, [])
-        b = getattr(second, attr)
+        first_val = getattr(first, attr, [])
+        second_val = getattr(second, attr)

-        # if a is good use it
-        if None is not a or (isinstance(a, list) and len(a)):
-            return a
+        # if first_val is good use it
+        if None is not first_val or (isinstance(first_val, list) and len(first_val)):
+            return first_val
         # if not use b (if b isn't set it'll just be default)
-        return b
+        return second_val

     @staticmethod
-    def _unicodify(obj, encoding='utf-8'):
+    def _unicodify(obj, encoding='utf8'):
         if isinstance(obj, text_type):
             try:
-                return obj.encode('latin1').decode('utf8')
+                return obj.encode('latin1').decode(encoding)
             except (BaseException, Exception):
                 pass
         return obj
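The `_unicodify` change generalises a well-known mojibake repair: text that was UTF-8 bytes mis-decoded as latin1 round-trips back through latin1 to recover the real characters. A standalone sketch of the trick:

```python
def fix_mojibake(text, encoding='utf8'):
    """Undo a latin1 mis-decode by re-encoding and decoding with the real charset."""
    try:
        return text.encode('latin1').decode(encoding)
    except (UnicodeEncodeError, UnicodeDecodeError):
        return text  # leave text untouched when the round-trip does not apply


assert fix_mojibake('CafÃ©') == 'Café'
```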
@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

-# all regexes are case insensitive
+# all regexes are case-insensitive

 normal_regexes = [
     ('garbage_name',
@@ -109,7 +109,7 @@ class TVEpisodeSample(tv.TVEpisode):
         self.scene_absolute_number = absolute_number  # type: int
         self._airdate = datetime.date(2010, 3, 9)  # type: datetime.date
         self.show_obj = TVShowSample()  # type: TVShowSample
-        self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV)  # type: int
+        self._status = Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV)  # type: int
         self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'  # type: AnyStr
         self._is_proper = True  # type: bool
         self._version = 2  # type: int

@@ -196,7 +196,7 @@ def check_valid_abd_naming(pattern=None):

 def check_valid_sports_naming(pattern=None):
     """
-    Checks if the name is can be parsed back to its original form for an sports format.
+    Checks if the name is can be parsed back to its original form for a sports format.

     Returns true if the naming is valid, false if not.
     :param pattern: String Naming Pattern

@@ -294,7 +294,7 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t
     # make a fake episode object
     sample_ep_obj = TVEpisodeSample(2, 3, 3, 'Ep Name')

-    sample_ep_obj._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+    sample_ep_obj._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
     sample_ep_obj._airdate = datetime.date(2011, 3, 9)

     if abd:

@@ -313,14 +313,14 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t
     if None is not multi:
         sample_ep_obj._name = 'Ep Name (1)'
         second_ep = TVEpisodeSample(2, 4, 4, 'Ep Name (2)')
-        second_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+        second_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
         normal_naming = not anime or 3 == anime_type
         release_name = sample_ep_obj._release_name = second_ep._release_name = \
             ('Show.Name.003-004.HDTV.XviD-RLSGROUP', 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP')[normal_naming]
         sample_ep_obj.related_ep_obj.append(second_ep)
         if normal_naming:
             third_ep = TVEpisodeSample(2, 5, 5, 'Ep Name (3)')
-            third_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+            third_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
             third_ep._release_name = release_name
             sample_ep_obj.related_ep_obj.append(third_ep)
         else:
@@ -36,7 +36,7 @@ if False:
     from _23 import DirEntry
     from typing import AnyStr, Optional, Tuple, Union

-# regex to parse time (12/24 hour format)
+# regex to parse time (12/24-hour format)
 time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)
 am_regex = re.compile(r'(A[. ]? ?M)', flags=re.I)
 pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I)

@@ -174,7 +174,7 @@ def _update_zoneinfo():
     url_data = helpers.get_url(url)
     if None is url_data:
         update_last_retry()
-        # when None is urlData, trouble connecting to github
+        # when None is urlData, trouble connecting to GitHub
         logger.log(u'Fetching zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url,
                    logger.WARNING)
         return

@@ -263,13 +263,13 @@ def update_network_dict():

     network_tz_data = {}

-    # network timezones are stored on github pages
+    # network timezones are stored on GitHub pages
     url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt'

     url_data = helpers.get_url(url)
     if url_data in (None, ''):
         update_last_retry()
-        # When None is urlData, trouble connecting to github
+        # When None is urlData, trouble connecting to GitHub
         logger.debug(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url)
         load_network_dict(load=False)
         return

@@ -413,7 +413,7 @@ def parse_time(time_of_day):
     hour = helpers.try_int(time_parsed.group(1))
     mins = helpers.try_int(time_parsed.group(4))
     ampm = time_parsed.group(5)
-    # convert am/pm to 24 hour clock
+    # convert am/pm to 24-hour clock
     if None is not ampm:
         if None is not pm_regex.search(ampm) and 12 != hour:
             hour += 12

@@ -505,13 +505,13 @@ def _load_network_conversions():

     conversions_in = []

-    # network conversions are stored on github pages
+    # network conversions are stored on GitHub pages
     url = 'https://raw.githubusercontent.com/prinz23/sg_network_conversions/master/conversions.txt'

     url_data = helpers.get_url(url)
     if url_data in (None, ''):
         update_last_retry()
-        # when no url_data, trouble connecting to github
+        # when no url_data, trouble connecting to GitHub
         logger.debug(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url)
         return

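The `parse_time` hunk converts am/pm times to a 24-hour clock with the module's three regexes. A trimmed, self-contained sketch of the same conversion (the real function also resolves timezones and returns a datetime):

```python
import re

# same shape as the module's regexes
time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)
am_regex = re.compile(r'(A[. ]? ?M)', flags=re.I)
pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I)


def parse_time(time_of_day):
    """Convert '8:30 PM' / '12 AM' / '20:30' to an (hour, minute) pair."""
    m = time_regex.search(time_of_day)
    if not m:
        return None
    hour = int(m.group(1))
    mins = int(m.group(4) or m.group(6) or 0)
    ampm = m.group(5)
    if ampm is not None:
        if pm_regex.search(ampm) and 12 != hour:
            hour += 12   # 1 PM .. 11 PM
        elif am_regex.search(ampm) and 12 == hour:
            hour = 0     # 12 AM is midnight
    return hour, mins


assert parse_time('8:30 PM') == (20, 30)
assert parse_time('12 AM') == (0, 0)
```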
@ -40,7 +40,7 @@ SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"')
|
|||
RE_NORMAL_NAME = re.compile(r'\.\w{1,5}$')
|
||||
|
||||
|
||||
def platform_encode(p):
|
||||
def _platform_encode(p):
|
||||
""" Return Unicode name, if not already Unicode, decode with UTF-8 or latin1 """
|
||||
try:
|
||||
return decode_str(p)
|
||||
|
@ -48,17 +48,17 @@ def platform_encode(p):
|
|||
return decode_str(p, sickgear.SYS_ENCODING, errors='replace').replace('?', '!')
|
||||
|
||||
|
||||
def name_extractor(subject):
|
||||
def _name_extractor(subject):
|
||||
""" Try to extract a file name from a subject line, return `subject` if in doubt """
|
||||
result = subject
|
||||
for name in re.findall(SUBJECT_FN_MATCHER, subject):
|
||||
name = name.strip(' "')
|
||||
if name and RE_NORMAL_NAME.search(name):
|
||||
result = name
|
||||
return platform_encode(result)
|
||||
return _platform_encode(result)
|
||||
|
||||
|
||||
def getSeasonNZBs(name, url_data, season):
|
||||
def _get_season_nzbs(name, url_data, season):
|
||||
"""
|
||||
|
||||
:param name: name
|
||||
|
@ -71,31 +71,31 @@ def getSeasonNZBs(name, url_data, season):
|
|||
:rtype: Tuple[Dict, AnyStr]
|
||||
"""
|
||||
try:
|
||||
showXML = etree.ElementTree(etree.XML(url_data))
|
||||
show_xml = etree.ElementTree(etree.XML(url_data))
|
||||
except SyntaxError:
|
||||
logger.log(u'Unable to parse the XML of %s, not splitting it' % name, logger.ERROR)
|
||||
return {}, ''
|
||||
|
||||
filename = name.replace('.nzb', '')
|
||||
|
||||
nzbElement = showXML.getroot()
|
||||
nzb_element = show_xml.getroot()
|
||||
|
||||
regex = r'([\w\._\ ]+)[\._ ]S%02d[\._ ]([\w\._\-\ ]+)' % season
|
||||
|
||||
-    sceneNameMatch = re.search(regex, filename, re.I)
-    if sceneNameMatch:
-        showName, qualitySection = sceneNameMatch.groups()
+    scene_name_match = re.search(regex, filename, re.I)
+    if scene_name_match:
+        show_name, quality_section = scene_name_match.groups()
     else:
         logger.log('%s - Not a valid season pack scene name. If it\'s a valid one, log a bug.' % name, logger.ERROR)
         return {}, ''

-    regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(showName), season)
+    regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(show_name), season)
     regex = regex.replace(' ', '.')

     ep_files = {}
     xmlns = None

-    for cur_file in list(nzbElement):
+    for cur_file in list(nzb_element):
         if not isinstance(cur_file.tag, string_types):
             continue
         xmlns_match = re.match(r'[{](https?://[A-Za-z0-9_./]+/nzb)[}]file', cur_file.tag)

@@ -108,7 +108,7 @@ def getSeasonNZBs(name, url_data, season):
             # print curFile.get("subject"), "doesn't match", regex
             continue
         cur_ep = match.group(1)
-        fn = name_extractor(cur_file.get('subject', ''))
+        fn = _name_extractor(cur_file.get('subject', ''))
         if cur_ep == re.sub(r'\+\d+\.par2$', '', fn, flags=re.I):
             bn, ext = os.path.splitext(fn)
             cur_ep = re.sub(r'\.(part\d+|vol\d+(\+\d+)?)$', '', bn, flags=re.I)

@@ -126,7 +126,7 @@ def getSeasonNZBs(name, url_data, season):
     return ep_files, xmlns


-def createNZBString(file_elements, xmlns):
+def _create_nzb_string(file_elements, xmlns):
     """

     :param file_elements: first element

@@ -134,17 +134,17 @@ def createNZBString(file_elements, xmlns):
     :return:
     :rtype: AnyStr
     """
-    rootElement = etree.Element("nzb")
+    root_element = etree.Element("nzb")
     if xmlns:
-        rootElement.set("xmlns", xmlns)
+        root_element.set("xmlns", xmlns)

     for curFile in file_elements:
-        rootElement.append(stripNS(curFile, xmlns))
+        root_element.append(_strip_ns(curFile, xmlns))

-    return etree.tostring(rootElement, encoding='utf-8')
+    return etree.tostring(root_element, encoding='utf-8')


-def saveNZB(nzb_name, nzb_string):
+def _save_nzb(nzb_name, nzb_string):
     """

     :param nzb_name: nzb name

@@ -160,15 +160,15 @@ def saveNZB(nzb_name, nzb_string):
         logger.log(u'Unable to save NZB: ' + ex(e), logger.ERROR)


-def stripNS(element, ns):
+def _strip_ns(element, ns):
     element.tag = element.tag.replace("{" + ns + "}", "")
     for curChild in list(element):
-        stripNS(curChild, ns)
+        _strip_ns(curChild, ns)

     return element


-def splitResult(result):
+def split_result(result):
     """

     :param result: search result

@@ -195,7 +195,7 @@ def splitResult(result):
     # bust it up
     season = parse_result.season_number if None is not parse_result.season_number else 1

-    separate_nzbs, xmlns = getSeasonNZBs(result.name, resp, season)
+    separate_nzbs, xmlns = _get_season_nzbs(result.name, resp, season)

     result_list = []

@@ -246,7 +246,7 @@ def splitResult(result):
         nzb_result.provider = result.provider
         nzb_result.quality = result.quality
         nzb_result.show_obj = result.show_obj
-        nzb_result.extraInfo = [createNZBString(separate_nzbs[new_nzb], xmlns)]
+        nzb_result.extraInfo = [_create_nzb_string(separate_nzbs[new_nzb], xmlns)]

         result_list.append(nzb_result)

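Note on the file above: the renamed _strip_ns helper recurses over an ElementTree element, dropping the '{namespace}' prefix the parser adds to every tag so later code can match plain tag names. A minimal standalone sketch of the same idea using only the standard library (the names and sample namespace here are illustrative, not taken from this commit):

import xml.etree.ElementTree as etree

def strip_ns(element, ns):
    # ElementTree stores a namespaced tag as '{uri}tag'; cut the prefix off
    element.tag = element.tag.replace('{' + ns + '}', '')
    for child in list(element):
        strip_ns(child, ns)  # recurse so nested <file>/<segments> tags match too
    return element

ns = 'http://www.newzbin.com/DTD/2003/nzb'
doc = etree.XML('<nzb xmlns="%s"><file subject="x"/></nzb>' % ns)
assert 'nzb' == strip_ns(doc, ns).tag and 'file' == doc[0].tag
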
@@ -154,7 +154,7 @@ class PeopleQueueActions(object):

 class PeopleQueueItem(generic_queue.QueueItem):
     def __init__(self, action_id, show_obj, uid=None, force=False, **kwargs):
-        # type: (integer_types, TVShow, AnyStr, bool, Dict) -> PeopleQueueItem
+        # type: (integer_types, TVShow, AnyStr, bool, Dict) -> None
         """

         :param action_id:

@@ -172,7 +172,7 @@ class PeopleQueueItem(generic_queue.QueueItem):
 class CastQueueItem(PeopleQueueItem):
     def __init__(self, show_obj, show_info_cast=None, uid=None, force=False, scheduled_update=False, switch=False,
                  **kwargs):
-        # type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> CastQueueItem
+        # type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> None
         """

         :param show_obj: show obj

@@ -762,7 +762,7 @@ class PostProcessor(object):

         # if there is a quality available in the status then we don't need to bother guessing from the filename
         if ep_obj.status in common.Quality.SNATCHED_ANY:
-            old_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+            old_status, ep_quality = common.Quality.split_composite_status(ep_obj.status)
             if common.Quality.UNKNOWN != ep_quality:
                 self._log(
                     u'Using "%s" quality from the old status' % common.Quality.qualityStrings[ep_quality],

@@ -779,7 +779,7 @@ class PostProcessor(object):
             if not cur_name:
                 continue

-            ep_quality = common.Quality.nameQuality(cur_name, ep_obj.show_obj.is_anime)
+            ep_quality = common.Quality.name_quality(cur_name, ep_obj.show_obj.is_anime)
             quality_log = u' "%s" quality parsed from the %s %s'\
                           % (common.Quality.qualityStrings[ep_quality], thing, cur_name)

@@ -790,14 +790,14 @@ class PostProcessor(object):
             else:
                 self._log(u'Found' + quality_log, logger.DEBUG)

-        ep_quality = common.Quality.fileQuality(self.file_path)
+        ep_quality = common.Quality.file_quality(self.file_path)
         if common.Quality.UNKNOWN != ep_quality:
             self._log(u'Using "%s" quality parsed from the metadata file content of %s'
                       % (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG)
             return ep_quality

         # Try guessing quality from the file name
-        ep_quality = common.Quality.assumeQuality(self.file_name)
+        ep_quality = common.Quality.assume_quality(self.file_name)
         self._log(u'Using guessed "%s" quality from the file name %s'
                   % (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG)

@@ -889,7 +889,7 @@ class PostProcessor(object):
             self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG)
             return True

-        old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+        old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status)

         # if old episode is not downloaded/archived then it's safe
         if common.DOWNLOADED != old_ep_status and common.ARCHIVED != old_ep_status:

@@ -1002,10 +1002,10 @@ class PostProcessor(object):

             cur_ep_obj.release_name = self.release_name or ''

-            any_qualities, best_qualities = common.Quality.splitQuality(cur_ep_obj.show_obj.quality)
-            cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_ep_obj.status)
+            any_qualities, best_qualities = common.Quality.split_quality(cur_ep_obj.show_obj.quality)
+            cur_status, cur_quality = common.Quality.split_composite_status(cur_ep_obj.status)

-            cur_ep_obj.status = common.Quality.compositeStatus(
+            cur_ep_obj.status = common.Quality.composite_status(
                 **({'status': common.DOWNLOADED, 'quality': quality},
                    {'status': common.ARCHIVED, 'quality': quality})
                 [cur_ep_obj.status in common.Quality.SNATCHED_BEST or

@@ -1111,7 +1111,7 @@ class PostProcessor(object):

         # set the status of the episodes
         # for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
-        #     cur_ep_obj.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
+        #     cur_ep_obj.status = common.Quality.composite_status(common.SNATCHED, new_ep_quality)

         # if the show directory doesn't exist then make it if allowed
         if not os.path.isdir(ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS:

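Many hunks in this file only rename the composite status helpers (splitCompositeStatus to split_composite_status, compositeStatus to composite_status, splitQuality to split_quality). The idea behind the pair is to pack an episode status and a quality into one integer and unpack it again. A rough standalone sketch of such an encoding (the 100-multiplier is an assumption for illustration, not read from this diff):

DOWNLOADED = 4   # illustrative status constant
HDTV = 8         # illustrative quality constant

def composite_status(status, quality):
    # pack (status, quality) into a single int
    return status + 100 * quality

def split_composite_status(composite):
    # unpack the int back into (status, quality)
    return composite % 100, composite // 100

assert (DOWNLOADED, HDTV) == split_composite_status(composite_status(DOWNLOADED, HDTV))
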
@@ -73,7 +73,7 @@ def search_propers(provider_proper_obj=None):

     proper_sch = sickgear.proper_finder_scheduler
     if None is proper_sch.start_time:
-        run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
+        run_in = proper_sch.last_run + proper_sch.cycle_time - datetime.datetime.now()
         run_at = ', next check '
         if datetime.timedelta() > run_in:
             run_at += 'imminent'

@@ -131,7 +131,7 @@ def get_old_proper_level(show_obj, tvid, prodid, season, episode_numbers, old_st
                 [tvid, prodid, season, episode])
             if not result or not isinstance(result[0]['resource'], string_types) or not result[0]['resource']:
                 continue
-            nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
+            nq = Quality.scene_quality(result[0]['resource'], show_obj.is_anime)
             if nq != new_quality:
                 continue
             try:

@@ -214,7 +214,7 @@ def load_webdl_types():
 def _search_provider(cur_provider, provider_propers, aired_since_shows, recent_shows, recent_anime):
     # type: (GenericProvider, List, datetime.datetime, List[Tuple[int, int]], List[Tuple[int, int]]) -> None
     try:
-        # we need to extent the referenced list from parameter to update the original var
+        # we need to extend the referenced list from parameter to update the original var
         provider_propers.extend(cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                           anime=recent_anime))
     except AuthException as e:

@@ -253,7 +253,7 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
     # 2. native proper search: active search enabled providers
     provider_list = list(filter(
         lambda p: p.is_active() and (p.enable_recentsearch, p.enable_backlog)[None is proper_dict],
-        sickgear.providers.sortedProviderList()))
+        sickgear.providers.sorted_sources()))
     search_threads = []

     if None is proper_dict:

@@ -362,8 +362,8 @@ def _get_proper_list(aired_since_shows,  # type: datetime.datetime
             # only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
             # check if we want this release: same quality as current, current has correct status
             # restrict other release group releases to Proper's
-            old_status, old_quality = Quality.splitCompositeStatus(int(sql_result[0]['status']))
-            cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
+            old_status, old_quality = Quality.split_composite_status(int(sql_result[0]['status']))
+            cur_proper.quality = Quality.name_quality(cur_proper.name, parse_result.is_anime)
             cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
                 parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime, check_is_repack=True)
             cur_proper.proper_level = cur_proper.properlevel  # local non global value

@@ -631,7 +631,7 @@ def get_needed_qualites(needed=None):
                 continue
             ep_obj = show_obj.get_episode(season=cur_result['season'], episode=cur_result['episode'])
             if ep_obj:
-                ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+                ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
                 if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]:
                     needed.check_needed_qualities([ep_quality])

@@ -699,7 +699,7 @@ def _set_last_proper_search(when):


 def next_proper_timeleft():
-    return sickgear.proper_finder_scheduler.timeLeft()
+    return sickgear.proper_finder_scheduler.time_left()


 def get_last_proper_search():

@@ -29,6 +29,7 @@ if False:
     from typing import AnyStr, List, Union
     from .generic import GenericProvider, NZBProvider, TorrentProvider

+# noinspection PyUnresolvedReferences
 __all__ = [
     # usenet
     'filesharingtalk',

@@ -55,41 +56,41 @@ for module in __all__:
         raise e


-def sortedProviderList():
+def sorted_sources():
     # type: (...) -> List[Union[GenericProvider, NZBProvider, TorrentProvider]]
     """
     return sorted provider list

     :return: sorted list of providers
     """
-    initialList = sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList
-    providerDict = dict(zip([x.get_id() for x in initialList], initialList))
+    initial_list = sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers
+    provider_dict = dict(zip([x.get_id() for x in initial_list], initial_list))

-    newList = []
+    new_list = []

     # add all modules in the priority list, in order
     for curModule in sickgear.PROVIDER_ORDER:
-        if curModule in providerDict:
-            newList.append(providerDict[curModule])
+        if curModule in provider_dict:
+            new_list.append(provider_dict[curModule])

     if not sickgear.PROVIDER_ORDER:
-        nzb = list(filter(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(providerDict)))
-        tor = list(filter(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(providerDict)))
-        newList = sorted(filter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \
+        nzb = list(filter(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(provider_dict)))
+        tor = list(filter(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(provider_dict)))
+        new_list = sorted(filter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \
            sorted(filter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \
            sorted(filter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \
            sorted(filter(lambda p: p.anime_only, tor), key=lambda v: v.get_id())

     # add any modules that are missing from that list
-    for curModule in providerDict:
-        if providerDict[curModule] not in newList:
-            newList.append(providerDict[curModule])
+    for curModule in provider_dict:
+        if provider_dict[curModule] not in new_list:
+            new_list.append(provider_dict[curModule])

-    return newList
+    return new_list


-def makeProviderList():
-    return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
+def provider_modules():
+    return [x.provider for x in [_get_module_by_name(y) for y in __all__] if x]


 def generic_provider_name(n):

@@ -102,7 +103,7 @@ def generic_provider_url(u):
     return u.strip().strip('/').lower().replace('https', 'http')


-def make_unique_list(p_list, d_list=None):
+def _make_unique_list(p_list, d_list=None):
     # type: (List, List) -> List
     """
     remove provider duplicates

@@ -135,32 +136,32 @@ def make_unique_list(p_list, d_list=None):
     return new_p_list


-def getNewznabProviderList(data):
+def newznab_source_list(data):
     # type: (AnyStr) -> List
-    defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
-    providerList = make_unique_list(list(filter(lambda _x: _x, [makeNewznabProvider(x) for x in data.split('!!!')])),
-                                    defaultList)
+    default_list = [_create_newznab_source(x) for x in _default_newznab_sources().split('!!!')]
+    provider_list = _make_unique_list(list(filter(
+        lambda _x: _x, [_create_newznab_source(x) for x in data.split('!!!')])), default_list)

-    providerDict = dict(zip([x.name for x in providerList], providerList))
+    provider_dict = dict(zip([x.name for x in provider_list], provider_list))

-    for curDefault in defaultList:
+    for curDefault in default_list:
         if not curDefault:
             continue

-        if curDefault.name not in providerDict:
+        if curDefault.name not in provider_dict:
             curDefault.default = True
-            providerList.append(curDefault)
+            provider_list.append(curDefault)
         else:
-            providerDict[curDefault.name].default = True
+            provider_dict[curDefault.name].default = True
             for k in ('name', 'url', 'needs_auth', 'search_mode', 'search_fallback',
                       'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
                       'server_type'):
-                setattr(providerDict[curDefault.name], k, getattr(curDefault, k))
+                setattr(provider_dict[curDefault.name], k, getattr(curDefault, k))

-    return list(filter(lambda _x: _x, providerList))
+    return list(filter(lambda _x: _x, provider_list))


-def makeNewznabProvider(config_string):
+def _create_newznab_source(config_string):
     if not config_string:
         return None

@@ -181,19 +182,19 @@ def makeNewznabProvider(config_string):

     newznab_module = sys.modules['sickgear.providers.newznab']

-    newProvider = newznab_module.NewznabProvider(name, url, **params)
-    newProvider.enabled = '1' == enabled
+    new_provider = newznab_module.NewznabProvider(name, url, **params)
+    new_provider.enabled = '1' == enabled

-    return newProvider
+    return new_provider


-def getTorrentRssProviderList(data):
-    providerList = list(filter(lambda _x: _x, [makeTorrentRssProvider(x) for x in data.split('!!!')]))
+def torrent_rss_source_list(data):
+    provider_list = list(filter(lambda _x: _x, [_create_torrent_rss_source(x) for x in data.split('!!!')]))

-    return list(filter(lambda _x: _x, providerList))
+    return list(filter(lambda _x: _x, provider_list))


-def makeTorrentRssProvider(config_string):
+def _create_torrent_rss_source(config_string):
     if not config_string:
         return None

@@ -217,25 +218,27 @@ def makeTorrentRssProvider(config_string):
         return None

     try:
-        torrentRss = sys.modules['sickgear.providers.rsstorrent']
+        torrent_rss = sys.modules['sickgear.providers.rsstorrent']
     except (BaseException, Exception):
         return

-    newProvider = torrentRss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch,
-                                                enable_backlog)
-    newProvider.enabled = '1' == enabled
+    new_provider = torrent_rss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch,
+                                                  enable_backlog)
+    new_provider.enabled = '1' == enabled

-    return newProvider
+    return new_provider


-def getDefaultNewznabProviders():
-    return '!!!'.join(['NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0',
-                       'DrunkenSlug|https://api.drunkenslug.com/||5030,5040|0|eponly|0|0|0',
-                       'NinjaCentral|https://ninjacentral.co.za/||5030,5040|0|eponly|0|0|0',
-                       ])
+def _default_newznab_sources():
+    return '!!!'.join([
+        '|'.join(_src) for _src in
+        (['NZBgeek', 'https://api.nzbgeek.info/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
+         ['DrunkenSlug', 'https://api.drunkenslug.com/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
+         ['NinjaCentral', 'https://ninjacentral.co.za/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
+         )])


-def getProviderModule(name):
+def _get_module_by_name(name):
     prefix, cprov, name = 'sickgear.providers.', 'motsuc'[::-1], name.lower()
     if name in __all__ and prefix + name in sys.modules:
         return sys.modules[prefix + name]

@@ -244,11 +247,11 @@ def getProviderModule(name):
     raise Exception('Can\'t find %s%s in providers' % (prefix, name))


-def getProviderClass(provider_id):
-    providerMatch = [x for x in
-                     sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList if
-                     provider_id == x.get_id()]
+def get_by_id(provider_id):
+    provider_match = [x for x in
+                      sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers if
+                      provider_id == x.get_id()]

-    if 1 != len(providerMatch):
+    if 1 != len(provider_match):
         return None
-    return providerMatch[0]
+    return provider_match[0]

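The rewritten _default_newznab_sources builds each pipe-delimited config entry from a field list instead of a hardcoded literal; joining nine fields with '|' reproduces the old string exactly, including the empty key field that yields '||'. A quick standalone check for one source:

def default_newznab_sources():
    # '|'.join each field list, then '!!!'.join the resulting entries
    return '!!!'.join([
        '|'.join(src) for src in
        (['NZBgeek', 'https://api.nzbgeek.info/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
         )])

entry = default_newznab_sources().split('!!!')[0]
assert entry == 'NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0'
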
@@ -367,7 +367,7 @@ class BTNCache(tvcache.TVCache):

     def _cache_data(self, **kwargs):

-        return self.provider.cache_data(age=self._getLastUpdate().timetuple(), min_time=self.update_iv)
+        return self.provider.cache_data(age=self._get_last_update().timetuple(), min_time=self.update_iv)


 provider = BTNProvider()

@@ -502,7 +502,7 @@ class GenericProvider(object):
             if log_warning:
                 # Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado"
                 prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.current_thread().name
-                                                            for x in sickgear.providers.sortedProviderList()])]
+                                                            for x in sickgear.providers.sorted_sources()])]
                 logger.log('%sToo many requests reached at %s, waiting for %s' % (
                     prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
             return use_tmr_limit

@@ -544,8 +544,8 @@ class GenericProvider(object):
         :param url: Address where to fetch data from
         :param skip_auth: Skip authentication check of provider if True
         :param use_tmr_limit: An API limit can be +ve before a fetch, but unwanted, set False to short should_skip
-        :param args: params to pass-through to get_url
-        :param kwargs: keyword params to pass-through to get_url
+        :param args: params to pass through to get_url
+        :param kwargs: keyword params to pass through to get_url
         :return: None or data fetched from URL
         """
         data = None

@@ -641,7 +641,7 @@ class GenericProvider(object):
        :param name: name
        :return:
        """
-        return re.sub(r'[^\w\d_]', '_', name.strip().lower())
+        return re.sub(r'[^\w_]', '_', name.strip().lower())

     def image_name(self, *default_name):
         # type: (...) -> AnyStr

@@ -672,7 +672,7 @@ class GenericProvider(object):
         rxc_delim = re.compile(r'[&;]')
         rxc_skip_key = re.compile(r'clearance')

-        for cur_p in sickgear.providers.sortedProviderList():
+        for cur_p in sickgear.providers.sorted_sources():
             pid = cur_p.get_id()
             auths = set([])
             for cur_kt in ['password', 'passkey', 'api_key', 'key', 'digest', 'cookies', 'hash']:

@@ -755,7 +755,7 @@ class GenericProvider(object):
     def is_enabled(self):
         # type: (...) -> bool
         """
-        This should be overridden and should return the config setting eg. sickgear.MYPROVIDER
+        This should be overridden and should return the config setting e.g. sickgear.MYPROVIDER
         """
         return self.enabled

@@ -804,7 +804,7 @@ class GenericProvider(object):
         try:
             btih = None
             try:
-                btih = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
+                btih = re.findall(r'urn:btih:(\w{32,40})', result.url)[0]
                 if 32 == len(btih):
                     btih = make_btih(btih)
             except (BaseException, Exception):

@@ -927,7 +927,7 @@ class GenericProvider(object):

     def search_rss(self, ep_obj_list):
         # type: (List[TVEpisode]) -> Dict[TVEpisode, SearchResult]
-        return self.cache.findNeededEpisodes(ep_obj_list)
+        return self.cache.find_needed_episodes(ep_obj_list)

     def get_quality(self, item, anime=False):
         # type: (etree.Element, bool) -> int

@@ -939,7 +939,7 @@ class GenericProvider(object):
         :return: a Quality value obtained from the node's data
         """
         (title, url) = self._title_and_url(item)
-        quality = Quality.sceneQuality(title, anime)
+        quality = Quality.scene_quality(title, anime)
         return quality

     def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0, **kwargs):

@@ -1008,7 +1008,7 @@ class GenericProvider(object):
             all_cells = all_cells if any(all_cells) else header_row.find_all('td')

             headers = [re.sub(
-                r'[\s]+', '',
+                r'\s+', '',
                 ((any([cell.get_text()]) and any([rc[x].search(cell.get_text()) for x in iterkeys(rc)]) and cell.get_text())
                  or (cell.attrs.get('id') and any([rc[x].search(cell['id']) for x in iterkeys(rc)]) and cell['id'])
                  or (cell.attrs.get('title') and any([rc[x].search(cell['title']) for x in iterkeys(rc)]) and cell['title'])

@@ -1103,7 +1103,7 @@ class GenericProvider(object):
         search_list = []
         for cur_ep_obj in ep_obj_list:
             # search cache for episode result
-            cache_result = self.cache.searchCache(cur_ep_obj, manual_search)  # type: List[SearchResult]
+            cache_result = self.cache.search_cache(cur_ep_obj, manual_search)  # type: List[SearchResult]
             if cache_result:
                 if cur_ep_obj.episode not in results:
                     results[cur_ep_obj.episode] = cache_result

@@ -1348,7 +1348,7 @@ class GenericProvider(object):
         :param kwargs:
         :return:
         """
-        results = self.cache.listPropers(search_date)
+        results = self.cache.list_propers(search_date)

         return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show_obj) for x in
                 results]

@@ -1458,7 +1458,7 @@ class GenericProvider(object):
         except IndexError:
             return None
         try:
-            value *= 1024 ** ['b', 'k', 'm', 'g', 't'].index(re.findall('([tgmk])[i]?b', size_dim.lower())[0])
+            value *= 1024 ** ['b', 'k', 'm', 'g', 't'].index(re.findall('([tgmk])i?b', size_dim.lower())[0])
         except IndexError:
             pass
         return int(math.ceil(value))

@@ -1531,7 +1531,7 @@ class NZBProvider(GenericProvider):
         :param kwargs:
         :return:
         """
-        cache_results = self.cache.listPropers(search_date)
+        cache_results = self.cache.list_propers(search_date)
         results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show_obj)
                    for x in cache_results]

@@ -1708,7 +1708,7 @@ class TorrentProvider(GenericProvider):
         else:
             # noinspection PyUnresolvedReferences
             name = item.title
-        return Quality.sceneQuality(name, anime)
+        return Quality.scene_quality(name, anime)

     @staticmethod
     def _reverse_quality(quality):

@@ -1829,7 +1829,7 @@ class TorrentProvider(GenericProvider):
         prefix = ([prefix], prefix)[isinstance(prefix, list)]

         search_params = []
-        crop = re.compile(r'([.\s])(?:\1)+')
+        crop = re.compile(r'([.\s])\1+')
         for name in get_show_names_all_possible(self.show_obj, scenify=process_name and getattr(self, 'scene', True),
                                                 season=season):
             for detail in ep_detail:

@@ -1965,7 +1965,7 @@ class TorrentProvider(GenericProvider):

         seen_attr = 'PROVIDER_SEEN'
         setattr(sickgear, seen_attr, list(filter(lambda u: self.__module__ not in u,
-                                                getattr(sickgear, seen_attr, []))))
+                                                 getattr(sickgear, seen_attr, []))))

         self.failure_count = 3 * bool(failure_count)
         if self.should_skip():

@@ -2160,7 +2160,7 @@ class TorrentProvider(GenericProvider):
             if self.should_skip(log_warning=False):
                 break

-            proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
+            proper_check = re.compile(r'(?i)%s' % clean_term.sub('', proper_term))
             for item in items:
                 if self.should_skip(log_warning=False):
                     break

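Several hunks above only tighten regular expressions without changing what they match: [\w] equals \w, [\s]+ equals \s+, [i]? equals i?, and ([.\s])(?:\1)+ equals ([.\s])\1+ because a lone backreference needs no non-capturing group. A standalone check of the last pair:

import re

old_crop = re.compile(r'([.\s])(?:\1)+')
new_crop = re.compile(r'([.\s])\1+')

sample = 'Show...Name  S01'
# both collapse a run of repeated separators down to a single one
assert old_crop.sub(r'\1', sample) == new_crop.sub(r'\1', sample) == 'Show.Name S01'
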
@@ -347,7 +347,7 @@ class NewznabProvider(generic.NZBProvider):
             caps[NewznabConstants.SEARCH_SEASON] = 'season'
         if NewznabConstants.SEARCH_EPISODE not in caps or not caps.get(NewznabConstants.SEARCH_EPISODE):
             caps[NewznabConstants.SEARCH_TEXT] = 'ep'
-        if (TVINFO_TVRAGE not in caps or not caps.get(TVINFO_TVRAGE)):
+        if TVINFO_TVRAGE not in caps or not caps.get(TVINFO_TVRAGE):
             caps[TVINFO_TVRAGE] = 'rid'
         if NewznabConstants.SEARCH_TEXT not in caps or not caps.get(NewznabConstants.SEARCH_TEXT):
             caps[NewznabConstants.SEARCH_TEXT] = 'q'

@@ -645,7 +645,7 @@ class NewznabProvider(generic.NZBProvider):
                 if not getattr(s, 'wanted_quality', None):
                     # this should not happen, the creation is missing for the search in this case
                     logger.log('wanted_quality property was missing for search, creating it', logger.WARNING)
-                    ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+                    ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
                     s.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
                     needed.check_needed_qualities(s.wanted_quality)

@@ -682,14 +682,14 @@ class NewznabProvider(generic.NZBProvider):
             needed.check_needed_types(ep_obj.show_obj)
             if not ep_obj.show_obj.is_anime and not ep_obj.show_obj.is_sports:
                 if not getattr(ep_obj, 'wanted_quality', None):
-                    ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+                    ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
                     ep_obj.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
                 needed.check_needed_qualities(ep_obj.wanted_quality)
         else:
             if not ep_obj.show_obj.is_anime and not ep_obj.show_obj.is_sports:
                 for cur_ep_obj in ep_obj_list:
                     if not getattr(cur_ep_obj, 'wanted_quality', None):
-                        ep_status, ep_quality = Quality.splitCompositeStatus(cur_ep_obj.status)
+                        ep_status, ep_quality = Quality.split_composite_status(cur_ep_obj.status)
                         cur_ep_obj.wanted_quality = get_wanted_qualities(cur_ep_obj, ep_status, ep_quality,
                                                                          unaired=True)
                     needed.check_needed_qualities(cur_ep_obj.wanted_quality)

@@ -733,7 +733,7 @@ class NewznabProvider(generic.NZBProvider):
                 continue

             # search cache for episode result
-            cache_result = self.cache.searchCache(ep_obj, manual_search)
+            cache_result = self.cache.search_cache(ep_obj, manual_search)
             if cache_result:
                 if ep_obj.episode not in results:
                     results[ep_obj.episode] = cache_result

@@ -1070,7 +1070,7 @@ class NewznabProvider(generic.NZBProvider):
         :param kwargs:
         :return:
         """
-        cache_results = self.cache.listPropers(search_date)
+        cache_results = self.cache.list_propers(search_date)
         results = [classes.Proper(x['name'], x['url'],
                                   datetime.datetime.fromtimestamp(x['time']), self.show_obj) for x in cache_results]

@@ -1183,10 +1183,10 @@ class NewznabCache(tvcache.TVCache):
                 root = elem
         return root, ns

-    def updateCache(self,
-                    needed=NeededQualities(need_all=True),  # type: NeededQualities
-                    **kwargs
-                    ):
+    def update_cache(self,
+                     needed=NeededQualities(need_all=True),  # type: NeededQualities
+                     **kwargs
+                     ):
         """

         :param needed: needed qualites class

@@ -1195,7 +1195,7 @@ class NewznabCache(tvcache.TVCache):
         if 4489 != sickgear.RECENTSEARCH_INTERVAL or self.should_update():
             n_spaces = {}
             try:
-                check = self._checkAuth()
+                check = self.check_auth()
                 if isinstance(check, bool) and not check:
                     items = None
                 else:

@@ -1205,12 +1205,12 @@ class NewznabCache(tvcache.TVCache):
                 items = None

             if items:
-                self._clearCache()
+                self.clear_cache()

                 # parse data
                 cl = []
                 for item in items:
-                    ci = self._parseItem(n_spaces, item)
+                    ci = self.parse_item(n_spaces, item)
                     if None is not ci:
                         cl.append(ci)

@@ -1219,7 +1219,7 @@ class NewznabCache(tvcache.TVCache):
                     my_db.mass_action(cl)

             # set updated as time the attempt to fetch data is
-            self.setLastUpdate()
+            self.set_last_update()

     @staticmethod
     def parse_ids(item, ns):

@@ -1240,7 +1240,7 @@ class NewznabCache(tvcache.TVCache):
         return ids

     # overwrite method with that parses the rageid from the newznab feed
-    def _parseItem(self,
+    def parse_item(self,
                    ns,  # type: Dict
                    item  # type: etree.Element
                    ):  # type: (...) -> Union[List[AnyStr, List[Any]], None]

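The renamed update_cache keeps its original shape: authenticate, fetch the feed, clear the stale cache, parse every item into a DB action, run the batch, then stamp the attempt time whether or not items arrived. A hedged sketch of that control flow with all collaborators stubbed out (this is not the real tvcache API):

def update_cache(check_auth, fetch, clear_cache, parse_item, mass_action, set_last_update):
    items = fetch() if check_auth() else None
    if items:
        clear_cache()
        batch = [ci for ci in (parse_item(it) for it in items) if ci is not None]
        if batch:
            mass_action(batch)
    # record the fetch attempt time, success or not
    set_last_update()
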
@@ -164,7 +164,7 @@ class SnowflProvider(generic.TorrentProvider):
             from sickgear import providers
             if 'torlock' in url.lower():
                 prov = next(filter(lambda p: 'torlock' == p.name.lower(), (filter(
-                    lambda sp: sp.providerType == self.providerType, providers.sortedProviderList()))))
+                    lambda sp: sp.providerType == self.providerType, providers.sorted_sources()))))
                 state = prov.enabled
                 prov.enabled = True
                 _ = prov.url

@@ -118,7 +118,7 @@ def access_method(host):

 def test_authentication(host=None, username=None, password=None, apikey=None):
     """
-    Sends a simple API request to SAB to determine if the given connection information is connect
+    Sends a simple API request to SAB to determine if the given connection information is correct

     Returns: A tuple containing the success boolean and a message
     :param host: The host where SAB is running (incl port)

@@ -318,7 +318,7 @@ def retrieve_exceptions():

     if cl:
         my_db.mass_action(cl)
-        name_cache.buildNameCache(update_only_scene=True)
+        name_cache.build_name_cache(update_only_scene=True)

     # since this could invalidate the results of the cache we clear it out after updating
     if changed_exceptions:

@@ -369,7 +369,7 @@ def update_scene_exceptions(tvid, prodid, scene_exceptions):
                     ' (indexer, indexer_id, show_name, season) VALUES (?,?,?,?)',
                     [tvid, prodid, cur_exception, cur_season])

-    sickgear.name_cache.buildNameCache(update_only_scene=True)
+    sickgear.name_cache.build_name_cache(update_only_scene=True)


 def _custom_exceptions_fetcher():

@@ -45,8 +45,8 @@ def get_scene_numbering(tvid, prodid, season, episode, fallback_to_xem=True, sho
     returns the TVDB numbering.
     (so the return values will always be set)

-    kwargs['scene_result']: type: Optional[List[Row]] passed thru
-    kwargs['show_result']: type: Optional[List[Row]] passed thru
+    kwargs['scene_result']: type: Optional[List[Row]] passed through
+    kwargs['show_result']: type: Optional[List[Row]] passed through

     :param tvid: tvid
     :type tvid: int

@@ -134,8 +134,8 @@ def get_scene_absolute_numbering(tvid, prodid, absolute_number, season, episode,
     returns the TVDB numbering.
     (so the return values will always be set)

-    kwargs['scene_result']: type: Optional[List[Row]] passed thru
-    kwargs['show_result']: type: Optional[List[Row]] passed thru
+    kwargs['scene_result']: type: Optional[List[Row]] passed through
+    kwargs['show_result']: type: Optional[List[Row]] passed through

     :param tvid: tvid
     :type tvid: int

@@ -26,17 +26,17 @@ from exceptions_helper import ex


 class Scheduler(threading.Thread):
-    def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0),
-                 start_time=None, threadName="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False):
+    def __init__(self, action, cycle_time=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0),
+                 start_time=None, thread_name="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False):
         super(Scheduler, self).__init__()

-        self.lastRun = datetime.datetime.now() + run_delay - cycleTime
+        self.last_run = datetime.datetime.now() + run_delay - cycle_time
         self.action = action
-        self.cycleTime = cycleTime
+        self.cycle_time = cycle_time
         self.start_time = start_time
         self.prevent_cycle_run = prevent_cycle_run

-        self.name = threadName
+        self.name = thread_name
         self.silent = silent
         self._stopper = threading.Event()
         self._unpause = threading.Event()

@@ -65,10 +65,10 @@ class Scheduler(threading.Thread):
         else:
             self.unpause()

-    def timeLeft(self):
-        return self.cycleTime - (datetime.datetime.now() - self.lastRun)
+    def time_left(self):
+        return self.cycle_time - (datetime.datetime.now() - self.last_run)

-    def forceRun(self):
+    def force_run(self):
         if not self.action.amActive:
             self.force = True
             return True

@@ -93,15 +93,15 @@ class Scheduler(threading.Thread):
             should_run = False

             # check if interval has passed
-            if current_time - self.lastRun >= self.cycleTime:
+            if current_time - self.last_run >= self.cycle_time:
                 # check if wanting to start around certain time taking interval into account
                 if self.start_time:
                     hour_diff = current_time.time().hour - self.start_time.hour
-                    if not hour_diff < 0 and hour_diff < self.cycleTime.seconds // 3600:
+                    if not hour_diff < 0 and hour_diff < self.cycle_time.seconds // 3600:
                         should_run = True
                     else:
-                        # set lastRun to only check start_time after another cycleTime
-                        self.lastRun = current_time
+                        # set last_run to only check start_time after another cycle_time
+                        self.last_run = current_time
                 else:
                     should_run = True

@@ -110,13 +110,13 @@ class Scheduler(threading.Thread):

             if should_run and ((self.prevent_cycle_run is not None and self.prevent_cycle_run()) or
                                getattr(self.action, 'prevent_run', False)):
-                logger.log(u'%s skipping this cycleTime' % self.name, logger.WARNING)
-                # set lastRun to only check start_time after another cycleTime
-                self.lastRun = current_time
+                logger.log(u'%s skipping this cycle_time' % self.name, logger.WARNING)
+                # set last_run to only check start_time after another cycle_time
+                self.last_run = current_time
                 should_run = False

             if should_run:
-                self.lastRun = current_time
+                self.last_run = current_time

                 try:
                     if not self.silent:

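With the rename, the scheduler's arithmetic reads directly: time_left() is cycle_time minus the time elapsed since last_run, and seeding last_run as now + run_delay - cycle_time makes the first run come due run_delay after startup. A standalone sketch mirroring those two lines of the diff:

import datetime

cycle_time = datetime.timedelta(minutes=10)
run_delay = datetime.timedelta(minutes=2)
# same seeding as Scheduler.__init__: first run falls due run_delay from now
last_run = datetime.datetime.now() + run_delay - cycle_time

def time_left():
    return cycle_time - (datetime.datetime.now() - last_run)

# run() triggers once the interval has fully elapsed, i.e. time_left() <= 0
print('due' if datetime.timedelta() >= time_left() else 'next run in %s' % time_left())
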
@@ -165,9 +165,9 @@ def snatch_episode(result, end_status=SNATCHED):
     for cur_ep_obj in result.ep_obj_list:
         with cur_ep_obj.lock:
             if is_first_best_match(cur_ep_obj.status, result):
-                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
+                cur_ep_obj.status = Quality.composite_status(SNATCHED_BEST, result.quality)
             else:
-                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
+                cur_ep_obj.status = Quality.composite_status(end_status, result.quality)

             item = cur_ep_obj.get_sql()
             if None is not item:

@@ -354,7 +354,7 @@ def is_final_result(result):
     Checks if the given result is good enough quality that we can stop searching for other ones.

     :param result: search result to check
-    :return: If the result is the highest quality in both the any/best quality lists then this function
+    :return: If the result is the highest quality in both any and best quality lists then this function
              returns True, if not then it's False
     """

@@ -362,7 +362,7 @@ def is_final_result(result):

     show_obj = result.ep_obj_list[0].show_obj

-    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+    any_qualities, best_qualities = Quality.split_quality(show_obj.quality)

     # if there is a download that's higher than this then we definitely need to keep looking
     if best_qualities and max(best_qualities) > result.quality:

@@ -378,11 +378,11 @@ def is_final_result(result):

     elif best_qualities and max(best_qualities) == result.quality:

-        # if this is the best download but we have a higher initial download then keep looking
+        # if this is the best download, but we have a higher initial download then keep looking
         if any_qualities and max(any_qualities) > result.quality:
             return False

-        # if this is the best download and we don't have a higher initial download then we're done
+        # if this is the best download, and we don't have a higher initial download then we're done
         return True

     # if we got here than it's either not on the lists, they're empty, or it's lower than the highest required

@@ -392,7 +392,7 @@ def is_final_result(result):
 def is_first_best_match(ep_status, result):
     # type: (int, sickgear.classes.SearchResult) -> bool
     """
-    Checks if the given result is a best quality match and if we want to archive the episode on first match.
+    Checks if the given result is the best quality match and if we want to archive the episode on first match.

     :param ep_status: current episode object status
     :param result: search result to check

@@ -403,11 +403,11 @@ def is_first_best_match(ep_status, result):
               result.name, logger.DEBUG)

     show_obj = result.ep_obj_list[0].show_obj
-    cur_status, cur_quality = Quality.splitCompositeStatus(ep_status)
+    cur_status, cur_quality = Quality.split_composite_status(ep_status)

-    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+    any_qualities, best_qualities = Quality.split_quality(show_obj.quality)

-    # if there is a download that's a match to one of our best qualities and
+    # if there is a download that's a match to one of our best qualities, and
     # we want to archive the episode then we are done
     if best_qualities and show_obj.upgrade_once and \
             (result.quality in best_qualities and

@@ -433,7 +433,7 @@ def set_wanted_aired(ep_obj,  # type: TVEpisode
     :param ep_count_scene: count of episodes in scene seasons
     :param manual: manual search
     """
-    ep_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+    ep_status, ep_quality = common.Quality.split_composite_status(ep_obj.status)
     ep_obj.wanted_quality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=unaired, manual=manual)
     ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
     ep_obj.eps_aired_in_scene_season = ep_count_scene.get(

@@ -458,7 +458,7 @@ def get_wanted_qualities(ep_obj,  # type: TVEpisode
     """
     if isinstance(ep_obj, TVEpisode):
         return sickgear.WANTEDLIST_CACHE.get_wantedlist(ep_obj.show_obj.quality, ep_obj.show_obj.upgrade_once,
-                                                       cur_quality, cur_status, unaired, manual)
+                                                        cur_quality, cur_status, unaired, manual)

     return []

@@ -543,7 +543,7 @@ def wanted_episodes(show_obj,  # type: TVShow

     for result in sql_result:
         ep_obj = show_obj.get_episode(int(result['season']), int(result['episode']), ep_result=ep_sql_result)
-        cur_status, cur_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+        cur_status, cur_quality = common.Quality.split_composite_status(ep_obj.status)
         ep_obj.wanted_quality = get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=unaired)
         if not ep_obj.wanted_quality:
             continue

@@ -589,7 +589,7 @@ def search_for_needed_episodes(ep_obj_list):

     orig_thread_name = threading.current_thread().name

-    providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sortedProviderList()))
+    providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sorted_sources()))

     for cur_provider in providers:
         threading.current_thread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)

@@ -615,7 +615,7 @@ def search_for_needed_episodes(ep_obj_list):
             logger.log(u'All found results for %s were rejected.' % cur_ep_obj.pretty_name(), logger.DEBUG)
             continue

-        # if it's already in the list (from another provider) and the newly found quality is no better then skip it
+        # if it's already in the list (from another provider) and the newly found quality is no better, then skip it
         if cur_ep_obj in found_results and best_result.quality <= found_results[cur_ep_obj].quality:
             continue

@@ -632,7 +632,7 @@ def search_for_needed_episodes(ep_obj_list):
            found_results[cur_ep_obj] = best_result

         try:
-            cur_provider.save_list()
+            cur_provider.fails.save_list()
         except (BaseException, Exception):
             pass

@@ -718,7 +718,7 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m
         logger.log(u'Performing season pack search for %s' % show_obj.unique_name)

     try:
-        provider.cache._clearCache()
+        provider.cache.clear_cache()
         search_result_list = provider.find_search_results(show_obj, ep_obj_list, search_mode, manual_search,
                                                           try_other_searches=try_other_searches)
         if any(search_result_list):

@@ -766,7 +766,7 @@ def cache_torrent_file(
     # type: (...) -> Optional[TorrentSearchResult]

     cache_file = os.path.join(sickgear.CACHE_DIR or helpers.get_system_temp_dir(),
-                             '%s.torrent' % (helpers.sanitize_filename(search_result.name)))
+                              '%s.torrent' % (helpers.sanitize_filename(search_result.name)))

     if not helpers.download_file(
             search_result.url, cache_file, session=search_result.provider.session, failure_monitor=False):

@@ -840,7 +840,7 @@ def search_providers(

     orig_thread_name = threading.current_thread().name

-    provider_list = [x for x in sickgear.providers.sortedProviderList() if x.is_active() and
+    provider_list = [x for x in sickgear.providers.sorted_sources() if x.is_active() and
                      getattr(x, 'enable_backlog', None) and
                      (not torrent_only or GenericProvider.TORRENT == x.providerType) and
                      (not scheduled or getattr(x, 'enable_scheduled_backlog', None))]

@@ -878,7 +878,7 @@ def search_providers(
         if provider_id not in found_results or not len(found_results[provider_id]):
             continue

-        any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+        any_qualities, best_qualities = Quality.split_quality(show_obj.quality)

         # pick the best season NZB
         best_season_result = None

@@ -918,8 +918,8 @@ def search_providers(
                 else:
                     any_wanted = True

-            # if we need every ep in the season and there's nothing better then just download this and
-            # be done with it (unless single episodes are preferred)
+            # if we need every ep in the season and there's nothing better,
+            # then download this and be done with it (unless single episodes are preferred)
             if all_wanted and highest_quality_overall == best_season_result.quality:
                 logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                            (best_season_result.provider.providerType, best_season_result.name))

@@ -938,7 +938,7 @@ def search_providers(
                 logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                 # if not, break it apart and add them as the lowest priority results
-                individual_results = nzbSplitter.splitResult(best_season_result)
+                individual_results = nzbSplitter.split_result(best_season_result)

                 for cur_result in filter(
                         lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(

@@ -985,7 +985,7 @@ def search_providers(
             logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

             if sickgear.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size,
-                                                                          multi_result.provider.name):
+                                                                           multi_result.provider.name):
                 logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                 continue

@@ -1057,7 +1057,7 @@ def search_providers(
                         found_results[provider_id][cur_search_result][0].ep_obj_list[0]) or \
                         found_results[provider_id][cur_search_result][0].ep_obj_list[0].status
                     if old_status:
-                        status, quality = Quality.splitCompositeStatus(old_status)
+                        status, quality = Quality.split_composite_status(old_status)
                         use_quality_list = (status not in (
                             common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

@@ -1093,7 +1093,7 @@ def search_providers(
                 best_result.after_get_data_func(best_result)
                 best_result.after_get_data_func = None  # consume only once

-            # add result if its not a duplicate
+            # add result if it's not a duplicate
             found = False
             for i, result in enumerate(final_results):
                 for best_result_ep in best_result.ep_obj_list:

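The quality test in is_final_result comes down to comparisons against the maxima of the two lists that split_quality returns. A simplified standalone sketch of the rule visible in these hunks (plain ints stand in for quality constants, and the elided middle branch of the real function is not reproduced):

def is_final(result_quality, any_qualities, best_qualities):
    if best_qualities and max(best_qualities) > result_quality:
        return False  # a better 'best' is still possible, keep searching
    if best_qualities and max(best_qualities) == result_quality:
        # done unless a higher initial quality could still be grabbed
        return not (any_qualities and max(any_qualities) > result_quality)
    return bool(any_qualities) and max(any_qualities) == result_quality

assert is_final(8, [2, 4], [8]) is True
assert is_final(4, [2, 4, 8], [8]) is False
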
@@ -47,29 +47,29 @@ class BacklogSearchScheduler(scheduler.Scheduler):
         self.force = True

     def next_run(self):
-        if 1 >= self.action._lastBacklog:
+        if 1 >= self.action.last_backlog:
             return datetime.date.today()
-        elif (self.action._lastBacklog + self.action.cycleTime) < datetime.date.today().toordinal():
+        elif (self.action.last_backlog + self.action.cycle_time) < datetime.date.today().toordinal():
             return datetime.date.today()
-        return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)
+        return datetime.date.fromordinal(self.action.last_backlog + self.action.cycle_time)

     def next_backlog_timeleft(self):
         now = datetime.datetime.now()
-        torrent_enabled = 0 < len([x for x in sickgear.providers.sortedProviderList() if x.is_active() and
+        torrent_enabled = 0 < len([x for x in sickgear.providers.sorted_sources() if x.is_active() and
                                    getattr(x, 'enable_backlog', None) and GenericProvider.TORRENT == x.providerType])
-        if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycleTime:
-            nextruntime = now + self.timeLeft()
+        if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycle_time:
+            nextruntime = now + self.time_left()
             if not torrent_enabled:
                 nextpossibleruntime = (datetime.datetime.fromtimestamp(self.action.last_runtime) +
                                        datetime.timedelta(hours=23))
                 for _ in moves.xrange(5):
                     if nextruntime > nextpossibleruntime:
                         self.action.nextBacklog = nextruntime
-                        self.action.nextCyleTime = self.cycleTime
+                        self.action.nextCyleTime = self.cycle_time
                         break
-                    nextruntime += self.cycleTime
+                    nextruntime += self.cycle_time
             else:
-                self.action.nextCyleTime = self.cycleTime
+                self.action.nextCyleTime = self.cycle_time
                 self.action.nextBacklog = nextruntime
         return self.action.nextBacklog - now if self.action.nextBacklog > now else datetime.timedelta(seconds=0)

@@ -77,8 +77,8 @@ class BacklogSearchScheduler(scheduler.Scheduler):
 class BacklogSearcher(object):
     def __init__(self):

-        self._lastBacklog = self._get_last_backlog()
-        self.cycleTime = sickgear.BACKLOG_PERIOD
+        self.last_backlog = self._get_last_backlog()
+        self.cycle_time = sickgear.BACKLOG_PERIOD
         self.lock = threading.Lock()
         self.amActive = False  # type: bool
         self.amPaused = False  # type: bool

@@ -175,7 +175,7 @@ class BacklogSearcher(object):
         :param scheduled: scheduled backlog search (can be from webif or scheduler)
         :return: any provider is active for given backlog
         """
-        return 0 < len([x for x in sickgear.providers.sortedProviderList() if x.is_active() and
+        return 0 < len([x for x in sickgear.providers.sorted_sources() if x.is_active() and
                         getattr(x, 'enable_backlog', None) and
                         (not torrent_only or GenericProvider.TORRENT == x.providerType) and
                         (not scheduled or getattr(x, 'enable_scheduled_backlog', None))])

@@ -214,7 +214,7 @@ class BacklogSearcher(object):
             any_torrent_enabled = any(map(
                 lambda x: x.is_active() and getattr(x, 'enable_backlog', None)
                 and GenericProvider.TORRENT == x.providerType,
-                sickgear.providers.sortedProviderList()))
+                sickgear.providers.sorted_sources()))
             if not any_torrent_enabled:
                 logger.log('Last scheduled backlog run was within the last day, skipping this run.', logger.DEBUG)
                 return

@@ -383,8 +383,8 @@ class BacklogSearcher(object):
         if last_backlog > datetime.date.today().toordinal():
             last_backlog = 1

-        self._lastBacklog = last_backlog
-        return self._lastBacklog
+        self.last_backlog = last_backlog
+        return self.last_backlog

     @staticmethod
     def _set_last_backlog(when):

@@ -22,12 +22,10 @@ import re
 import threading
 import traceback

-import exceptions_helper
 # noinspection PyPep8Naming
 from exceptions_helper import ex

 import sickgear
-from lib.dateutil import tz
 from . import common, db, failed_history, generic_queue, helpers, \
     history, logger, network_timezones, properFinder, search, ui
 from .classes import Proper, SimpleNamespace

@@ -519,7 +517,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
             threads = []

             providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch,
-                                    sickgear.providers.sortedProviderList()))
+                                    sickgear.providers.sorted_sources()))
             for cur_provider in providers:
                 if not cur_provider.cache.should_update():
                     continue

@@ -528,7 +526,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
                     logger.log('Updating provider caches with recent upload data')

                 # spawn a thread for each provider to save time waiting for slow response providers
-                threads.append(threading.Thread(target=cur_provider.cache.updateCache,
+                threads.append(threading.Thread(target=cur_provider.cache.update_cache,
                                                 kwargs={'needed': needed},
                                                 name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
                 # start the thread we just created

@@ -645,7 +643,7 @@ class ManualSearchQueueItem(BaseSearchQueueItem):
             ep_count, ep_count_scene = get_aired_in_season(self.show_obj)
             set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True)
             if not getattr(self.segment, 'wanted_quality', None):
-                ep_status, ep_quality = common.Quality.splitCompositeStatus(self.segment.status)
+                ep_status, ep_quality = common.Quality.split_composite_status(self.segment.status)
                 self.segment.wanted_quality = search.get_wanted_qualities(self.segment, ep_status, ep_quality,
                                                                           unaired=True, manual=True)
                 if not self.segment.wanted_quality:

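RecentSearchQueueItem fans the renamed update_cache out over one thread per provider so a slow feed cannot stall the rest of the recent search. A minimal hedged sketch of that fan-out pattern with stub workers in place of provider caches:

import threading
import time

def update_cache(name, needed=None):
    time.sleep(0.1)  # stand-in for a slow provider feed fetch
    print('updated %s' % name)

threads = [threading.Thread(target=update_cache,
                            kwargs={'name': cur, 'needed': None},
                            name='RECENT-SEARCH :: [%s]' % cur)
           for cur in ('providerA', 'providerB')]
for t in threads:
    t.start()
for t in threads:
    t.join()  # wait for every cache before matching episodes against them
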
@@ -27,7 +27,7 @@ from six import integer_types, string_types

 # noinspection PyUnreachableCode
 if False:
-    from typing import Callable, Optional, Union
+    from typing import Optional, Union

 date_presets = ('%Y-%m-%d',
                 '%a, %Y-%m-%d',

@@ -234,7 +234,7 @@ class SGDatetime(datetime.datetime):
         """
         convert datetime to filetime
         special handling for windows filetime issues
-        for pre Windows 7 this can result in an exception for pre 1970 dates
+        for pre Windows 7 this can result in an exception for pre-1970 dates
         """
         obj = (dt, self)[self is not None]  # type: datetime.datetime
         if is_win:

@@ -286,6 +286,7 @@ class SGDatetime(datetime.datetime):
 # noinspection PyUnreachableCode
 if False:
+    # just to trick pycharm in correct type detection
     # noinspection PyUnusedLocal
     def timestamp_near(d_t):
         # type: (datetime.datetime) -> float
         pass

@@ -264,7 +264,7 @@ def make_scene_season_search_string(show_obj,  # type: sickgear.tv.TVShow
         ep_obj_list = show_obj.get_all_episodes(ep_obj.season)

         # get show qualities
-        any_qualities, best_qualities = common.Quality.splitQuality(show_obj.quality)
+        any_qualities, best_qualities = common.Quality.split_quality(show_obj.quality)

         # compile a list of all the episode numbers we need in this 'season'
         season_strings = []

@@ -272,7 +272,7 @@ def make_scene_season_search_string(show_obj,  # type: sickgear.tv.TVShow

             # get quality of the episode
             cur_composite_status = episode.status
-            cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_composite_status)
+            cur_status, cur_quality = common.Quality.split_composite_status(cur_composite_status)

             if best_qualities:
                 highest_best_quality = max(best_qualities)

@@ -378,7 +378,7 @@ def all_possible_show_names(show_obj, season=-1, force_anime=False):
     # type: (sickgear.tv.TVShow, int, bool) -> List[AnyStr]
     """
     Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name,
-    country codes on the end, eg. "Show Name (AU)", and any scene exception names.
+    country codes on the end, e.g. "Show Name (AU)", and any scene exception names.

     :param show_obj: a TVShow object that we should get the names of
     :param season: season

@@ -387,7 +387,7 @@ def all_possible_show_names(show_obj, season=-1, force_anime=False):
     """

     show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]
-    if not show_names:  # if we dont have any season specific exceptions fallback to generic exceptions
+    if not show_names:  # if we don't have any season specific exceptions fallback to generic exceptions
         season = -1
         show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:]

@@ -931,7 +931,7 @@ class QueueItemAdd(ShowQueueItem):
                     wanted_updates.append({'season': sr['season'], 'episode': sr['episode'],
                                            'status': sr['status']})
                 elif sr['status'] not in [WANTED]:
-                    cur_status, cur_quality = Quality.splitCompositeStatus(int(sr['status']))
+                    cur_status, cur_quality = Quality.split_composite_status(int(sr['status']))
                     if sickgear.WANTEDLIST_CACHE.get_wantedlist(
                             self.quality, self.upgrade_once, cur_quality, cur_status,
                             unaired=(sickgear.SEARCH_UNAIRED and not sickgear.UNAIRED_RECENT_SEARCH_ONLY)):

@@ -1155,7 +1155,7 @@ class QueueItemAdd(ShowQueueItem):
             raise

         # update internal name cache
-        name_cache.buildNameCache(self.show_obj)
+        name_cache.build_name_cache(self.show_obj)

         self.show_obj.load_episodes_from_db()

@@ -1446,7 +1446,7 @@ class QueueItemUpdate(ShowQueueItem):
         for cur_season in db_ep_obj_list:
             for cur_episode in db_ep_obj_list[cur_season]:
                 ep_obj = self.show_obj.get_episode(cur_season, cur_episode)  # type: Optional[TVEpisode]
-                status = sickgear.common.Quality.splitCompositeStatus(ep_obj.status)[0]
+                status = sickgear.common.Quality.split_composite_status(ep_obj.status)[0]
                 if self.switch or should_delete_episode(status):
                     if self.switch:
                         cl.append(self.show_obj.switch_ep_change_sql(

@@ -220,7 +220,7 @@ class ShowUpdater(object):
             if len(pi_list):
                 sickgear.show_queue_scheduler.action.daily_update_running = True

-                ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list))
+                ui.ProgressIndicators.set_indicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list))

             logger.log(u'Added all shows to show queue for full update')

@@ -1,222 +0,0 @@
-# Author: Frank Fenton
-#
-# This file is part of SickGear.
-#
-# SickGear is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickGear is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.
-
-import datetime
-import os
-import traceback
-
-import sickgear
-from . import helpers, logger, search_queue
-from .common import SKIPPED, WANTED
-from .indexers.indexer_config import TVINFO_TVRAGE
-
-
-class TraktChecker(object):
-    def __init__(self):
-        self.todoWanted = []
-
-    def run(self, force=False):
-        try:
-            # add shows from trakt.tv watchlist
-            if sickgear.TRAKT_USE_WATCHLIST:
-                self.todoWanted = []  # its about to all get re-added
-                if len(sickgear.ROOT_DIRS.split('|')) < 2:
-                    logger.log(u"No default root directory", logger.ERROR)
-                    return
-                self.updateShows()
-                self.updateEpisodes()
-
-            # sync trakt.tv library with SickGear library
-            if sickgear.TRAKT_SYNC:
-                self.syncLibrary()
-        except Exception:
-            logger.log(traceback.format_exc(), logger.DEBUG)
-
-    def findShow(self, tvid, prodid):
-        library = TraktCall("user/library/shows/all.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD)
-
-        if library == 'NULL':
-            logger.log(u"No shows found in your library, aborting library update", logger.DEBUG)
-            return
-
-        if not library:
-            logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR)
-            return
-
-        return filter(lambda x: int(prodid) in [int(x['tvdb_id']) or 0, int(x['tvrage_id'])] or 0, library)
-
-    def syncLibrary(self):
-        logger.log(u"Syncing Trakt.tv show library", logger.DEBUG)
-
-        for cur_show_obj in sickgear.showList:
-            self.addShowToTraktLibrary(cur_show_obj)
-
-    def removeShowFromTraktLibrary(self, show_obj):
-        data = {}
-        if self.findShow(show_obj.tvid, show_obj.prodid):
-            # URL parameters
-            data['tvdb_id'] = helpers.mapIndexersToShow(show_obj)[1]
-            data['title'] = show_obj.name
-            data['year'] = show_obj.startyear
-
-        if len(data):
-            logger.log(u"Removing " + show_obj.name + " from trakt.tv library", logger.DEBUG)
-            TraktCall("show/unlibrary/%API%", sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD,
-                      data)
-
-    def addShowToTraktLibrary(self, show_obj):
-        """
-        Sends a request to trakt indicating that the given show and all its episodes is part of our library.
-
-        show_obj: The TVShow object to add to trakt
-        """
-
-        data = {}
-
-        if not self.findShow(show_obj.tvid, show_obj.prodid):
-            # URL parameters
-            data['tvdb_id'] = helpers.mapIndexersToShow(show_obj)[1]
-            data['title'] = show_obj.name
-            data['year'] = show_obj.startyear
-
-        if len(data):
-            logger.log(u"Adding " + show_obj.name + " to trakt.tv library", logger.DEBUG)
-            TraktCall("show/library/%API%", sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD,
-                      data)
-
-    def updateShows(self):
-        logger.log(u"Starting trakt show watchlist check", logger.DEBUG)
-        watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD)

-        if watchlist == 'NULL':
-            logger.log(u"No shows found in your watchlist, aborting watchlist update", logger.DEBUG)
-            return
-
-        if not watchlist:
-            logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR)
-            return
-
-        for show in watchlist:
-            tvid = int(sickgear.TRAKT_DEFAULT_INDEXER)
-            prodid = int(show[('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]])
-
-            if int(sickgear.TRAKT_METHOD_ADD) != 2:
-                self.addDefaultShow(tvid, prodid, show["title"], SKIPPED)
-            else:
-                self.addDefaultShow(tvid, prodid, show["title"], WANTED)
-
-            if int(sickgear.TRAKT_METHOD_ADD) == 1:
-                show_obj = helpers.find_show_by_id({tvid: prodid})
-                if None is not show_obj:
-                    self.setEpisodeToWanted(show_obj, 1, 1)
-                else:
-                    self.todoWanted.append((prodid, 1, 1))
-
-    def updateEpisodes(self):
-        """
-        Sets episodes to wanted that are in trakt watchlist
-        """
-        logger.log(u"Starting trakt episode watchlist check", logger.DEBUG)
-        watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickgear.TRAKT_USERNAME, sickgear.TRAKT_API, sickgear.TRAKT_USERNAME, sickgear.TRAKT_PASSWORD)
-
-        if watchlist == 'NULL':
-            logger.log(u"No episodes found in your watchlist, aborting watchlist update", logger.DEBUG)
-            return
-
-        if not watchlist:
-            logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR)
-            return
-
-        for show in watchlist:
-            tvid = int(sickgear.TRAKT_DEFAULT_INDEXER)
-            prodid = int(show[('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]])
-
-            self.addDefaultShow(tvid, prodid, show['title'], SKIPPED)
-            show_obj = helpers.find_show_by_id({tvid: prodid})
-
-            try:
-                if show_obj and show_obj.tvid == tvid:
-                    for episode in show["episodes"]:
-                        if None is not show_obj:
-                            self.setEpisodeToWanted(show_obj, episode["season"], episode["number"])
-                        else:
-                            self.todoWanted.append((prodid, episode["season"], episode["number"]))
-            except TypeError:
-                logger.log(u"Could not parse the output from trakt for " + show["title"], logger.DEBUG)
-
-    def addDefaultShow(self, tvid, prod_id, name, status):
-        """
-        Adds a new show with the default settings
-        """
-        if not helpers.find_show_by_id({int(tvid): int(prodid)}):
-            logger.log(u"Adding show " + str(prod_id))
-            root_dirs = sickgear.ROOT_DIRS.split('|')
-
-            try:
-                location = root_dirs[int(root_dirs[0]) + 1]
-            except:
-                location = None
-
-            if location:
-                showPath = os.path.join(location, helpers.sanitize_filename(name))
-                dir_exists = helpers.make_dir(showPath)
-                if not dir_exists:
-                    logger.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
-                    return
-                else:
-                    helpers.chmod_as_parent(showPath)
-
-                sickgear.show_queue_scheduler.action.add_show(
-                    int(tvid), int(prod_id), showPath,
-                    quality=int(sickgear.QUALITY_DEFAULT),
-                    paused=sickgear.TRAKT_START_PAUSED, default_status=status,
-                    flatten_folders=int(sickgear.FLATTEN_FOLDERS_DEFAULT)
-                )
-            else:
-                logger.log(u"There was an error creating the show, no root directory setting found", logger.ERROR)
-                return
-
-    def setEpisodeToWanted(self, show_obj, s, e):
-        """
-        Sets an episode to wanted, only is it is currently skipped
-        """
-        ep_obj = show_obj.get_episode(int(s), int(e))
-        if ep_obj:
-
-            with ep_obj.lock:
-                if ep_obj.status != SKIPPED or ep_obj.airdate == datetime.date.fromordinal(1):
-                    return
-
-                logger.log(u"Setting episode s" + str(s) + "e" + str(e) + " of show " + show_obj.name + " to wanted")
-                # figure out what segment the episode is in and remember it so we can backlog it
-
-                ep_obj.status = WANTED
-                ep_obj.save_to_db()
-
-            backlog_queue_item = search_queue.BacklogQueueItem(show_obj, [ep_obj])
-            sickgear.search_queue_scheduler.action.add_item(backlog_queue_item)
-
-            logger.log(u"Starting backlog for " + show_obj.name + " season " + str(
-                s) + " episode " + str(e) + " because some eps were set to wanted")
-
-    def manageNewShow(self, show_obj):
-        logger.log(u"Checking if trakt watch list wants to search for episodes from new show " + show_obj.name,
-                   logger.DEBUG)
-        episodes = [i for i in self.todoWanted if i[0] == show_obj.prodid]
-        for episode in episodes:
-            self.todoWanted.remove(episode)
-            self.setEpisodeToWanted(show_obj, episode[1], episode[2])

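Note: the removed checker leans on the bool-as-index idiom `('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]`, which can read oddly; a quick sketch of how it evaluates (constant value assumed for illustration only):

    TVINFO_TVRAGE = 2  # assumed value, purely illustrative
    tvid = 2
    # the bool subscripts the pair: False -> index 0, True -> index 1
    key = ('tvdb_id', 'tvrage_id')[TVINFO_TVRAGE == tvid]
    assert 'tvrage_id' == key
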
@@ -43,7 +43,7 @@ import sickgear
 from . import db, helpers, history, image_cache, indexermapper, logger, \
     name_cache, network_timezones, notifiers, postProcessor, subtitles
 from .anime import AniGroupList
-from .classes import weakList
+from .classes import WeakList
 from .common import Quality, statusStrings, \
     ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED, SNATCHED_ANY, SNATCHED_PROPER, UNAIRED, UNKNOWN, WANTED, \
     NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED, NAMING_SEPARATED_REPEAT

@@ -280,7 +280,7 @@ def usable_id(value):
 def usable_rid(value):
     # type: (Union[AnyStr]) -> Optional[AnyStr]
     """
-    return value if is a id:format is valid
+    return value if is an id:format is valid
     otherwise None if value fails basic id format validation
     """
     if isinstance(value, string_types) and ':' in value:

@@ -452,7 +452,7 @@ class Person(Referential):
     def reset(self, person_obj=None):
         # type: (TVInfoPerson) -> None
         """
-        reset all properties with the exception of: name, id, ids
+        reset all properties except; name, id, ids

         :param person_obj: TVInfo Person object to reset to
         """

@@ -790,7 +790,7 @@ class Person(Referential):
         for i in (TVINFO_TRAKT, TVINFO_IMDB, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB):
             if not rp.ids.get(i):
                 continue
-            # in case it's the current source use it's id and lock if from being changed
+            # in case it's the current source use its id and lock if from being changed
             if cur_tv_info_src == i and rp.ids.get(i):
                 source_confirmed[i] = True
                 if rp.ids.get(i) != self.ids.get(i):

@@ -1403,8 +1403,8 @@ class TVShow(TVShowBase):

     @cast_list.setter
     def cast_list(self, value):
-        # type: (weakList[Character]) -> None
-        self._cast_list = None if not isinstance(value, weakList) else weakref.ref(value)
+        # type: (WeakList[Character]) -> None
+        self._cast_list = None if not isinstance(value, WeakList) else weakref.ref(value)

     @property
     def network_id(self):

@@ -1900,7 +1900,7 @@ class TVShow(TVShowBase):
                 bio=cur_row['c_bio'], ids=c_ids, image_url=cur_row['image_url'], person=[person],
                 persons_years=p_years, show_obj=self, sid=cur_row['c_id'],
                 thumb_url=cur_row['thumb_url'], updated=cur_row['cast_updated']))
-        cast_list = weakList(c for c in old_cast or [] if c.id not in old_list)
+        cast_list = WeakList(c for c in old_cast or [] if c.id not in old_list)
         self.cast_list = cast_list
         return cast_list

@@ -1990,7 +1990,7 @@ class TVShow(TVShowBase):
                 return True
         return False

-        # In some situations self.status = None.. need to figure out where that is!
+        # In some situations self.status = None, need to figure out where that is!
        if not self._status:
            self.status = ''
        logger.log('Status missing for show: [%s] with status: [%s]' %

@@ -2026,7 +2026,7 @@ class TVShow(TVShowBase):
         last_airdate = datetime.date.fromordinal(sql_result[1][0]['airdate']) \
             if sql_result and sql_result[1] else datetime.date.fromordinal(1)

-        # if show is not 'Ended' and last episode aired less then 460 days ago
+        # if show is not 'Ended' and last episode aired less than 460 days ago
         # or don't have an airdate for the last episode always update (status 'Continuing' or '')
         update_days_limit = 2013
         ended_limit = datetime.timedelta(days=update_days_limit)

@@ -2446,7 +2446,7 @@ class TVShow(TVShowBase):
             logger.log('No episode number found in %s, ignoring it' % path, logger.ERROR)
             return None

-        # for now lets assume that any episode in the show dir belongs to that show
+        # for now let's assume that any episode in the show dir belongs to that show
         season_number = parse_result.season_number if None is not parse_result.season_number else 1
         episode_numbers = parse_result.episode_numbers
         root_ep_obj = None

@@ -2471,7 +2471,7 @@ class TVShow(TVShowBase):

             else:
                 # if there is a new file associated with this ep then re-check the quality
-                status, quality = sickgear.common.Quality.splitCompositeStatus(ep_obj.status)
+                status, quality = sickgear.common.Quality.split_composite_status(ep_obj.status)

                 if IGNORED == status:
                     continue

@@ -2506,25 +2506,25 @@ class TVShow(TVShowBase):

         # if user replaces a file, attempt to recheck the quality unless it's know to be the same file
         if check_quality_again and not same_file:
-            new_quality = Quality.nameQuality(path, self.is_anime)
+            new_quality = Quality.name_quality(path, self.is_anime)
             if Quality.UNKNOWN == new_quality:
-                new_quality = Quality.fileQuality(path)
+                new_quality = Quality.file_quality(path)
             logger.log('Since this file was renamed, file %s was checked and quality "%s" found'
                        % (path, Quality.qualityStrings[new_quality]), logger.DEBUG)
-            status, quality = sickgear.common.Quality.splitCompositeStatus(ep_obj.status)
+            status, quality = sickgear.common.Quality.split_composite_status(ep_obj.status)
             if Quality.UNKNOWN != new_quality or status in (SKIPPED, UNAIRED):
-                ep_obj.status = Quality.compositeStatus(DOWNLOADED, new_quality)
+                ep_obj.status = Quality.composite_status(DOWNLOADED, new_quality)

         # check for status/quality changes as long as it's a new file
         elif not same_file and sickgear.helpers.has_media_ext(path)\
                 and ep_obj.status not in Quality.DOWNLOADED + Quality.ARCHIVED + [IGNORED]:

-            old_status, old_quality = Quality.splitCompositeStatus(ep_obj.status)
-            new_quality = Quality.nameQuality(path, self.is_anime)
+            old_status, old_quality = Quality.split_composite_status(ep_obj.status)
+            new_quality = Quality.name_quality(path, self.is_anime)
             if Quality.UNKNOWN == new_quality:
-                new_quality = Quality.fileQuality(path)
+                new_quality = Quality.file_quality(path)
                 if Quality.UNKNOWN == new_quality:
-                    new_quality = Quality.assumeQuality(path)
+                    new_quality = Quality.assume_quality(path)

             new_status = None

@@ -2536,7 +2536,7 @@ class TVShow(TVShowBase):
                             logger.DEBUG)
                 new_status = DOWNLOADED

-            # if it was snatched proper and we found a higher quality one then allow the status change
+            # if it was snatched proper, and we found a higher quality one then allow the status change
             elif SNATCHED_PROPER == old_status and old_quality < new_quality:
                 logger.log('STATUS: this episode used to be snatched proper with quality %s but'
                            ' a file exists with quality %s so setting the status to DOWNLOADED'

@@ -2550,18 +2550,18 @@ class TVShow(TVShowBase):
             if None is not new_status:
                 with ep_obj.lock:
                     logger.log('STATUS: we have an associated file, so setting the status from %s to DOWNLOADED/%s'
-                               % (ep_obj.status, Quality.compositeStatus(new_status, new_quality)), logger.DEBUG)
-                    ep_obj.status = Quality.compositeStatus(new_status, new_quality)
+                               % (ep_obj.status, Quality.composite_status(new_status, new_quality)), logger.DEBUG)
+                    ep_obj.status = Quality.composite_status(new_status, new_quality)

         elif same_file:
-            status, quality = Quality.splitCompositeStatus(ep_obj.status)
+            status, quality = Quality.split_composite_status(ep_obj.status)
             if status in (SKIPPED, UNAIRED):
-                new_quality = Quality.nameQuality(path, self.is_anime)
+                new_quality = Quality.name_quality(path, self.is_anime)
                 if Quality.UNKNOWN == new_quality:
-                    new_quality = Quality.fileQuality(path)
+                    new_quality = Quality.file_quality(path)
                 logger.log('Since this file has status: "%s", file %s was checked and quality "%s" found'
                            % (statusStrings[status], path, Quality.qualityStrings[new_quality]), logger.DEBUG)
-                ep_obj.status = Quality.compositeStatus(DOWNLOADED, new_quality)
+                ep_obj.status = Quality.composite_status(DOWNLOADED, new_quality)

         with ep_obj.lock:
             result = ep_obj.get_sql()

@@ -2773,7 +2773,7 @@ class TVShow(TVShowBase):
         :param scheduled_update:
         :param switch:
         """
-        # There's gotta be a better way of doing this but we don't wanna
+        # There's gotta be a better way of doing this, but we don't want to
         # change the cache value elsewhere
         if None is tvapi:
             tvinfo_config = sickgear.TVInfoAPI(self.tvid).api_params.copy()

@@ -2900,7 +2900,7 @@ class TVShow(TVShowBase):

         cast_list = self._load_cast_from_db()
         remove_char_ids = {c.id for c in cast_list or []}
-        cast_ordered = weakList()
+        cast_ordered = WeakList()
         for ct, c_l in iteritems(show_info_cast):  # type: (integer_types, List[TVInfoCharacter])
             if ct not in (RoleTypes.ActorMain, RoleTypes.Host, RoleTypes.Interviewer, RoleTypes.Presenter):
                 continue

@@ -3386,11 +3386,11 @@ class TVShow(TVShowBase):
         # check if downloaded files still exist, update our data if this has changed
         if 1 != sickgear.SKIP_REMOVED_FILES:
             with ep_obj.lock:
-                # if it used to have a file associated with it and it doesn't anymore then set it to IGNORED
+                # if it used to have a file associated with it, and it doesn't anymore then set it to IGNORED
                 if ep_obj.location and ep_obj.status in Quality.DOWNLOADED:
                     if ARCHIVED == sickgear.SKIP_REMOVED_FILES:
-                        ep_obj.status = Quality.compositeStatus(
-                            ARCHIVED, Quality.qualityDownloaded(ep_obj.status))
+                        ep_obj.status = Quality.composite_status(
+                            ARCHIVED, Quality.quality_downloaded(ep_obj.status))
                     else:
                         ep_obj.status = (sickgear.SKIP_REMOVED_FILES, IGNORED)[
                             not sickgear.SKIP_REMOVED_FILES]

@@ -3545,7 +3545,7 @@ class TVShow(TVShowBase):
         sickgear.FANART_RATINGS[self.tvid_prodid] = rating
         sickgear.save_config()

-        name_cache.buildNameCache(self)
+        name_cache.build_name_cache(self)
         self.reset_not_found_count()
         old_sid_int = self.create_sid(old_tvid, old_prodid)
         if old_sid_int != self.sid_int:

@@ -3680,7 +3680,7 @@ class TVShow(TVShowBase):
             wq = getattr(self.sxe_ep_obj.get(season, {}).get(episode, {}), 'wanted_quality', None)
             if None is not wq:
                 if quality in wq:
-                    cur_status, cur_quality = Quality.splitCompositeStatus(self.sxe_ep_obj[season][episode].status)
+                    cur_status, cur_quality = Quality.split_composite_status(self.sxe_ep_obj[season][episode].status)
                     if cur_status in (WANTED, UNAIRED, SKIPPED, FAILED):
                         logger.log('Existing episode status is wanted/unaired/skipped/failed,'
                                    ' getting found episode', logger.DEBUG)

@@ -3700,7 +3700,7 @@ class TVShow(TVShowBase):
             pass

         # if the quality isn't one we want under any circumstances then just say no
-        initial_qualities, archive_qualities = Quality.splitQuality(self._quality)
+        initial_qualities, archive_qualities = Quality.split_quality(self._quality)
         all_qualities = list(set(initial_qualities + archive_qualities))

         initial = '= (%s)' % ','.join([Quality.qualityStrings[qual] for qual in initial_qualities])

@@ -3725,7 +3725,7 @@ class TVShow(TVShowBase):
             logger.log('Unable to find a matching episode in database, ignoring found episode', logger.DEBUG)
             return False

-        cur_status, cur_quality = Quality.splitCompositeStatus(int(sql_result[0]['status']))
+        cur_status, cur_quality = Quality.split_composite_status(int(sql_result[0]['status']))
         ep_status_text = statusStrings[cur_status]

         logger.log('Existing episode status: %s (%s)' % (statusStrings[cur_status], ep_status_text), logger.DEBUG)

@@ -4011,7 +4011,7 @@ class TVEpisode(TVEpisodeBase):
             return

         self.refresh_subtitles()
-        # added the if because sometime it raises an error
+        # added the if because sometimes it raises an error
         self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1
         self.subtitles_lastsearch = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
         self.save_to_db()

@@ -4292,7 +4292,7 @@ class TVEpisode(TVEpisodeBase):
         except (BaseTVinfoEpisodenotfound, BaseTVinfoSeasonnotfound):
             logger.log('Unable to find the episode on %s... has it been removed? Should I delete from db?' %
                        sickgear.TVInfoAPI(self.tvid).name, logger.DEBUG)
-            # if I'm no longer on the Indexers but I once was then delete myself from the DB
+            # if I'm no longer on the Indexers, but I once was then delete myself from the DB
            if -1 != self._epid and helpers.should_delete_episode(self._status):
                self.delete_episode()
            elif UNKNOWN == self._status:

@@ -4352,7 +4352,7 @@ class TVEpisode(TVEpisodeBase):
             except (ValueError, IndexError):
                 logger.error('Malformed air date retrieved from %s (%s - %sx%s)' %
                              (sickgear.TVInfoAPI(self.tvid).name, self.show_obj.unique_name, season, episode))
-                # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
+                # if I'm incomplete on TVDB, but I once was complete then just delete myself from the DB for now
                 if -1 != self._epid and helpers.should_delete_episode(self._status):
                     self.delete_episode()
                 elif UNKNOWN == self._status:

@@ -4484,7 +4484,7 @@ class TVEpisode(TVEpisodeBase):
             # leave propers alone, you have to either post-process them or manually change them back
             elif self._status not in Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.ARCHIVED:
                 msg = '(1) Status changes from %s to ' % statusStrings[self._status]
-                self.status = Quality.statusFromNameOrFile(self._location, anime=self._show_obj.is_anime)
+                self.status = Quality.status_from_name_or_file(self._location, anime=self._show_obj.is_anime)
                 logger.log('%s%s' % (msg, statusStrings[self._status]), logger.DEBUG)

         # shouldn't get here probably

@@ -4513,7 +4513,7 @@ class TVEpisode(TVEpisodeBase):
         if '' != self.location:

             if UNKNOWN == self._status and sickgear.helpers.has_media_ext(self.location):
-                status_quality = Quality.statusFromNameOrFile(self.location, anime=self._show_obj.is_anime)
+                status_quality = Quality.status_from_name_or_file(self.location, anime=self._show_obj.is_anime)
                 logger.log('(3) Status changes from %s to %s' % (self._status, status_quality), logger.DEBUG)
                 self.status = status_quality

@@ -4841,8 +4841,8 @@ class TVEpisode(TVEpisodeBase):
     def _ep_name(self):
         """
         :return: the name of the episode to use during renaming. Combines the names of related episodes.
-        Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
-            "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"
+        E.g. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
+             "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"
         :rtype: AnyStr
         """

@@ -4915,7 +4915,7 @@ class TVEpisode(TVEpisodeBase):
                 return ''
             return parse_result.release_group

-        ep_status, ep_qual = Quality.splitCompositeStatus(self._status)
+        ep_status, ep_qual = Quality.split_composite_status(self._status)

         if sickgear.NAMING_STRIP_YEAR:
             show_name = re.sub(r'\(\d+\)$', '', self._show_obj.name).rstrip()

@@ -5061,7 +5061,7 @@ class TVEpisode(TVEpisodeBase):
             if not ep_sep or not ep_format:
                 continue

-            # start with the ep string, eg. E03
+            # start with the ep string, e.g. E03
             ep_string = self._format_string(ep_format.upper(), replace_map)
             for cur_ep_obj in self.related_ep_obj:

@@ -5089,7 +5089,7 @@ class TVEpisode(TVEpisodeBase):
             if 3 != anime_type:
                 absolute_number = (self._absolute_number, self._episode)[0 == self._absolute_number]

-                if 0 != self._season:  # dont set absolute numbers if we are on specials !
+                if 0 != self._season:  # don't set absolute numbers if we are on specials !
                     if 1 == anime_type:  # this crazy person wants both ! (note: +=)
                         ep_string += sep + '%(#)03d' % {'#': absolute_number}
                     elif 2 == anime_type:  # total anime freak only need the absolute number ! (note: =)

@@ -5272,7 +5272,7 @@ class TVEpisode(TVEpisodeBase):

     def airdate_modify_stamp(self):
         """
-        Make the modify date and time of a file reflect the show air date and time.
+        Make modify date and time of a file reflect the show air date and time.
        Note: Also called from postProcessor

        """

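Note on the widely renamed pair `composite_status`/`split_composite_status`: both work on one packed integer. A hedged sketch, assuming the SickBeard-era `status + 100 * quality` packing with power-of-two quality flags (illustrative only, not the project's code):

    QUALITIES = [1 << n for n in range(16)]  # assumed power-of-two quality flags

    def composite_status(status, quality):
        # pack a status (0..99) and one quality flag into a single int
        return status + 100 * quality

    def split_composite_status(composite):
        # undo composite_status: find the largest quality flag that fits
        for quality in sorted(QUALITIES, reverse=True):
            if composite >= 100 * quality:
                return composite - 100 * quality, quality
        return composite, 0

    assert (4, 8) == split_composite_status(composite_status(4, 8))
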
@@ -20,7 +20,7 @@ import sickgear
 from . import logger
 from ._legacy_classes import LegacyTVShow, LegacyTVEpisode
 from .common import UNKNOWN
-from .name_cache import buildNameCache
+from .name_cache import build_name_cache

 from six import string_types

@@ -132,7 +132,7 @@ class TVShowBase(LegacyTVShow, TVBase):
         _current_name = self._name
         self.dirty_setter('_name')(self, *arg)
         if _current_name != self._name:
-            buildNameCache(self)
+            build_name_cache(self)

     # imdbid = property(lambda self: self._imdbid, dirty_setter('_imdbid'))
     @property

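Note: the setter above rebuilds the name cache only when the stored value actually changes; a minimal standalone sketch of that change-detection pattern (class and helper names invented for illustration):

    class ShowBase(object):
        def __init__(self, name):
            self._name = name

        @property
        def name(self):
            return self._name

        @name.setter
        def name(self, value):
            current = self._name
            self._name = value
            if current != self._name:
                # only pay the cache rebuild cost on a real change
                build_name_cache(self)

    def build_name_cache(show):  # stand-in for sickgear.name_cache
        print('rebuilding name cache for', show.name)
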
@@ -33,6 +33,7 @@ from .tv import TVEpisode
 # noinspection PyUnreachableCode
 if False:
     from typing import Any, AnyStr, Dict, List, Tuple, Union
+    from providers.generic import GenericProvider, NZBProvider, TorrentProvider


 class CacheDBConnection(db.DBConnection):

@@ -50,7 +51,7 @@ class CacheDBConnection(db.DBConnection):

 class TVCache(object):
     def __init__(self, provider, interval=10):
-        # type: (AnyStr, int) -> None
+        # type: (Union[GenericProvider, NZBProvider, TorrentProvider], int) -> None
         self.provider = provider
         self.providerID = self.provider.get_id()
         self.providerDB = None

@@ -60,7 +61,7 @@ class TVCache(object):
     def get_db():
         return CacheDBConnection()

-    def _clearCache(self):
+    def clear_cache(self):
         if self.should_clear_cache():
             my_db = self.get_db()
             my_db.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID])

@@ -81,26 +82,13 @@ class TVCache(object):
             data = None
         return data

-    def _checkAuth(self):
+    def check_auth(self):
         # noinspection PyProtectedMember
         return self.provider._check_auth()

-    @staticmethod
-    def _checkItemAuth(title, url):
-        """
-
-        :param title: title
-        :type title: AnyStr
-        :param url: url
-        :type url: AnyStr
-        :return:
-        :rtype: bool
-        """
-        return True
-
-    def updateCache(self, **kwargs):
+    def update_cache(self, **kwargs):
         try:
-            self._checkAuth()
+            self.check_auth()
         except AuthException as e:
             logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
             return []

@@ -110,13 +98,13 @@ class TVCache(object):

         # clear cache
         if data:
-            self._clearCache()
+            self.clear_cache()

         # parse data
         cl = []
         for item in data or []:
             title, url = self._title_and_url(item)
-            ci = self._parseItem(title, url)
+            ci = self.parse_item(title, url)
             if None is not ci:
                 cl.append(ci)

@@ -128,13 +116,13 @@ class TVCache(object):
             logger.log('Warning could not save cache value [%s], caught err: %s' % (cl, ex(e)))

         # set updated as time the attempt to fetch data is
-        self.setLastUpdate()
+        self.set_last_update()

     def get_rss(self, url, **kwargs):
         return RSSFeeds(self.provider).get_feed(url, **kwargs)

     @staticmethod
-    def _translateTitle(title):
+    def _translate_title(title):
         """

         :param title: title

@@ -145,7 +133,7 @@ class TVCache(object):
         return u'' + title.replace(' ', '.')

     @staticmethod
-    def _translateLinkURL(url):
+    def _translate_link_url(url):
         """

         :param url: url

@@ -155,7 +143,7 @@ class TVCache(object):
         """
         return url.replace('&amp;', '&')

-    def _parseItem(self, title, url):
+    def parse_item(self, title, url):
         """

         :param title: title

@@ -165,18 +153,16 @@ class TVCache(object):
         :return:
         :rtype: None or List[AnyStr, List[Any]]
         """
-        self._checkItemAuth(title, url)
-
         if title and url:
-            title = self._translateTitle(title)
-            url = self._translateLinkURL(url)
+            title = self._translate_title(title)
+            url = self._translate_link_url(url)

             return self.add_cache_entry(title, url)

         logger.log('Data returned from the %s feed is incomplete, this result is unusable' % self.provider.name,
                    logger.DEBUG)

-    def _getLastUpdate(self):
+    def _get_last_update(self):
         """

         :return:

@@ -186,15 +172,15 @@ class TVCache(object):
         sql_result = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID])

         if sql_result:
-            lastTime = int(sql_result[0]['time'])
-            if lastTime > int(timestamp_near(datetime.datetime.now())):
-                lastTime = 0
+            last_time = int(sql_result[0]['time'])
+            if last_time > int(timestamp_near(datetime.datetime.now())):
+                last_time = 0
         else:
-            lastTime = 0
+            last_time = 0

-        return datetime.datetime.fromtimestamp(lastTime)
+        return datetime.datetime.fromtimestamp(last_time)

-    def _getLastSearch(self):
+    def _get_last_search(self):
         """

         :return:

@@ -204,15 +190,15 @@ class TVCache(object):
         sql_result = my_db.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID])

         if sql_result:
-            lastTime = int(sql_result[0]['time'])
-            if lastTime > int(timestamp_near(datetime.datetime.now())):
-                lastTime = 0
+            last_time = int(sql_result[0]['time'])
+            if last_time > int(timestamp_near(datetime.datetime.now())):
+                last_time = 0
         else:
-            lastTime = 0
+            last_time = 0

-        return datetime.datetime.fromtimestamp(lastTime)
+        return datetime.datetime.fromtimestamp(last_time)

-    def setLastUpdate(self, to_date=None):
+    def set_last_update(self, to_date=None):
         """

         :param to_date: date time

@@ -226,7 +212,7 @@ class TVCache(object):
                      {'time': int(time.mktime(to_date.timetuple()))},
                      {'provider': self.providerID})

-    def setLastSearch(self, to_date=None):
+    def _set_last_search(self, to_date=None):
         """

         :param to_date: date time

@@ -240,8 +226,8 @@ class TVCache(object):
                      {'time': int(time.mktime(to_date.timetuple()))},
                      {'provider': self.providerID})

-    lastUpdate = property(_getLastUpdate)
-    lastSearch = property(_getLastSearch)
+    last_update = property(_get_last_update)
+    last_search = property(_get_last_search)

     def should_update(self):
         """

@@ -250,7 +236,7 @@ class TVCache(object):
         :rtype: bool
         """
         # if we've updated recently then skip the update
-        return datetime.datetime.now() - self.lastUpdate >= datetime.timedelta(minutes=self.update_iv)
+        return datetime.datetime.now() - self.last_update >= datetime.timedelta(minutes=self.update_iv)

     def should_clear_cache(self):
         """

@@ -259,7 +245,7 @@ class TVCache(object):
         :rtype: bool
         """
         # if recent search hasn't used our previous results yet then don't clear the cache
-        return self.lastSearch >= self.lastUpdate
+        return self.last_search >= self.last_update

     def add_cache_entry(self,
                         name,  # type: AnyStr

@@ -340,22 +326,22 @@ class TVCache(object):
                     url, cur_timestamp, quality, release_group, version,
                     parse_result.show_obj.tvid]]

-    def searchCache(self,
-                    episode,  # type: TVEpisode
-                    manual_search=False  # type: bool
-                    ):  # type: (...) -> List[SearchResult]
+    def search_cache(self,
+                     episode,  # type: TVEpisode
+                     manual_search=False  # type: bool
+                     ):  # type: (...) -> List[SearchResult]
         """

         :param episode: episode object
         :param manual_search: manual search
         :return: found results or empty List
         """
-        neededEps = self.findNeededEpisodes(episode, manual_search)
-        if 0 != len(neededEps):
-            return neededEps[episode]
+        needed_eps = self.find_needed_episodes(episode, manual_search)
+        if 0 != len(needed_eps):
+            return needed_eps[episode]
         return []

-    def listPropers(self, date=None):
+    def list_propers(self, date=None):
         """

         :param date: date

@@ -372,14 +358,14 @@ class TVCache(object):

         return list(filter(lambda x: x['indexerid'] != 0, my_db.select(sql, [self.providerID])))

-    def findNeededEpisodes(self, ep_obj_list, manual_search=False):
+    def find_needed_episodes(self, ep_obj_list, manual_search=False):
         # type: (Union[TVEpisode, List[TVEpisode]], bool) -> Dict[TVEpisode, SearchResult]
         """

         :param ep_obj_list: episode object or list of episode objects
         :param manual_search: manual search
         """
-        neededEps = {}
+        needed_eps = {}
         cl = []

         my_db = self.get_db()

@@ -402,8 +388,8 @@ class TVCache(object):
         sql_result = list(itertools.chain(*sql_result))

         if not sql_result:
-            self.setLastSearch()
-            return neededEps
+            self._set_last_search()
+            return needed_eps

         # for each cache entry
         for cur_result in sql_result:

@@ -473,12 +459,12 @@ class TVCache(object):
                                                check_is_repack=True)

                 # add it to the list
-                if ep_obj not in neededEps:
-                    neededEps[ep_obj] = [result]
+                if ep_obj not in needed_eps:
+                    needed_eps[ep_obj] = [result]
                 else:
-                    neededEps[ep_obj].append(result)
+                    needed_eps[ep_obj].append(result)

         # datetime stamp this search so cache gets cleared
-        self.setLastSearch()
+        self._set_last_search()

-        return neededEps
+        return needed_eps

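Note: the cache-freshness rules renamed above reduce to two timestamp comparisons; a minimal sketch, with a hypothetical 10-minute interval:

    import datetime

    class CacheTimes(object):
        def __init__(self, update_iv=10):
            self.update_iv = update_iv  # minutes between provider fetches
            self.last_update = datetime.datetime.fromtimestamp(0)
            self.last_search = datetime.datetime.fromtimestamp(0)

        def should_update(self):
            # refresh only when the interval has elapsed since the last fetch
            return datetime.datetime.now() - self.last_update >= datetime.timedelta(minutes=self.update_iv)

        def should_clear_cache(self):
            # keep cached results until a recent search has consumed them
            return self.last_search >= self.last_update
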
@@ -117,7 +117,7 @@ class Notification(object):

 class ProgressIndicator(object):
     def __init__(self, percent_complete=0, current_status=None):
-        self.percentComplete = percent_complete
+        self.percent_complete = percent_complete
         self.currentStatus = {'title': ''} if None is current_status else current_status


@@ -128,20 +128,20 @@ class ProgressIndicators(object):
     }

     @staticmethod
-    def getIndicator(name):
+    def get_indicator(name):
         if name not in ProgressIndicators._pi:
             return []

         # if any of the progress indicators are done take them off the list
         for curPI in ProgressIndicators._pi[name]:
-            if None is not curPI and 100 == curPI.percentComplete():
+            if None is not curPI and 100 == curPI.percent_complete():
                 ProgressIndicators._pi[name].remove(curPI)

         # return the list of progress indicators associated with this name
         return ProgressIndicators._pi[name]

     @staticmethod
-    def setIndicator(name, indicator):
+    def set_indicator(name, indicator):
         ProgressIndicators._pi[name].append(indicator)


@@ -154,16 +154,16 @@ class QueueProgressIndicator(object):
         self.queueItemList = queue_item_list
         self.name = name

-    def numTotal(self):
+    def num_total(self):
         return len(self.queueItemList)

-    def numFinished(self):
+    def num_finished(self):
         return len([x for x in self.queueItemList if not x.is_in_queue()])

-    def numRemaining(self):
+    def num_remaining(self):
         return len([x for x in self.queueItemList if x.is_in_queue()])

-    def nextName(self):
+    def next_name(self):
         for curItem in [
                 sickgear.show_queue_scheduler.action.currentItem] + sickgear.show_queue_scheduler.action.queue:
             if curItem in self.queueItemList:

@@ -171,13 +171,13 @@ class QueueProgressIndicator(object):

         return "Unknown"

-    def percentComplete(self):
-        numFinished = self.numFinished()
-        numTotal = self.numTotal()
+    def percent_complete(self):
+        num_finished = self.num_finished()
+        num_total = self.num_total()

-        if 0 == numTotal:
+        if 0 == num_total:
             return 0
-        return int(float(numFinished) / float(numTotal) * 100)
+        return int(float(num_finished) / float(num_total) * 100)


 class LoadingTVShow(object):

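Note: a quick worked check of the integer math in `percent_complete`, assuming 3 of 8 queued items have finished:

    num_finished, num_total = 3, 8
    percent = int(float(num_finished) / float(num_total) * 100)
    assert 37 == percent  # int() truncates 37.5, it does not round
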
@@ -33,6 +33,7 @@ class WatchedStateUpdater(object):
         return sickgear.watched_state_queue_scheduler.action.is_in_queue(self.queue_item)

     def run(self):
+        # noinspection PyUnresolvedReferences
         if self.is_enabled():
             self.amActive = True
             new_item = self.queue_item()

@@ -48,7 +48,7 @@ class WatchedStateQueue(generic_queue.GenericQueue):

         return length

-    def add_item(self, item):
+    def add_item(self, item, **kwargs):
         if isinstance(item, EmbyWatchedStateQueueItem) and not self.is_in_queue(EmbyWatchedStateQueueItem):
             # emby watched state item
             generic_queue.GenericQueue.add_item(self, item)

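Note: adding `**kwargs` to the override keeps it call-compatible with the base class `add_item` and quiets signature warnings; a minimal sketch of the pattern with invented class names:

    class BaseQueue(object):
        def add_item(self, item, add_to_db=True):
            print('queued %r (add_to_db=%s)' % (item, add_to_db))

    class FilteredQueue(BaseQueue):
        def add_item(self, item, **kwargs):
            # accept and forward whatever keywords the base class grows
            if isinstance(item, str):
                super(FilteredQueue, self).add_item(item, **kwargs)

    FilteredQueue().add_item('episode', add_to_db=False)
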
@@ -791,7 +791,7 @@ def _mapQuality(show_obj):
     anyQualities = []
     bestQualities = []

-    iqualityID, aqualityID = Quality.splitQuality(int(show_obj))
+    iqualityID, aqualityID = Quality.split_quality(int(show_obj))
     if iqualityID:
         for quality in iqualityID:
             anyQualities.append(quality_map[quality])

@@ -1155,7 +1155,7 @@ class CMD_SickGearEpisode(ApiCall):
         timezone, episode['timezone'] = network_timezones.get_network_timezone(show_obj.network, return_name=True)
         episode['airdate'] = SGDatetime.sbfdate(SGDatetime.convert_to_setting(
             network_timezones.parse_date_time(int(episode['airdate']), show_obj.airs, timezone)), d_preset=dateFormat)
-        status, quality = Quality.splitCompositeStatus(int(episode["status"]))
+        status, quality = Quality.split_composite_status(int(episode["status"]))
         episode["status"] = _get_status_Strings(status)
         episode["quality"] = _get_quality_string(quality)
         episode["file_size_human"] = _sizeof_fmt(episode["file_size"])

@@ -1224,7 +1224,7 @@ class CMD_SickGearEpisodeSearch(ApiCall):

         # return the correct json value
         if ep_queue_item.success:
-            status, quality = Quality.splitCompositeStatus(ep_obj.status)
+            status, quality = Quality.split_composite_status(ep_obj.status)
             # TODO: split quality and status?
             return _responds(RESULT_SUCCESS, {"quality": _get_quality_string(quality)},
                              "Snatched (" + _get_quality_string(quality) + ")")

@@ -1348,7 +1348,7 @@ class CMD_SickGearEpisodeSetStatus(ApiCall):
                     continue

                 if None is not self.quality:
-                    ep_obj.status = Quality.compositeStatus(self.status, self.quality)
+                    ep_obj.status = Quality.composite_status(self.status, self.quality)
                 else:
                     ep_obj.status = self.status
                 result = ep_obj.get_sql()

@@ -1667,7 +1667,7 @@ class CMD_SickGearHistory(ApiCall):
         results = []
         np = NameParser(True, testing=True, indexer_lookup=False, try_scene_exceptions=False)
         for cur_result in sql_result:
-            status, quality = Quality.splitCompositeStatus(int(cur_result["action"]))
+            status, quality = Quality.split_composite_status(int(cur_result["action"]))
             if type_filter and status not in type_filter:
                 continue
             status = _get_status_Strings(status)

@@ -2164,14 +2164,14 @@ class CMD_SickGearForceSearch(ApiCall):
         result = None
         if 'recent' == self.searchtype and not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress() \
                 and not sickgear.recent_search_scheduler.action.amActive:
-            result = sickgear.recent_search_scheduler.forceRun()
+            result = sickgear.recent_search_scheduler.force_run()
         elif 'backlog' == self.searchtype and not sickgear.search_queue_scheduler.action.is_backlog_in_progress() \
                 and not sickgear.backlog_search_scheduler.action.amActive:
             sickgear.backlog_search_scheduler.force_search(force_type=FORCED_BACKLOG)
             result = True
         elif 'proper' == self.searchtype and not sickgear.search_queue_scheduler.action.is_propersearch_in_progress() \
                 and not sickgear.proper_finder_scheduler.action.amActive:
-            result = sickgear.proper_finder_scheduler.forceRun()
+            result = sickgear.proper_finder_scheduler.force_run()
         if result:
             return _responds(RESULT_SUCCESS, msg='%s search successfully forced' % self.searchtype)
         return _responds(RESULT_FAILURE,

@@ -2666,7 +2666,7 @@ class CMD_SickGearSetDefaults(ApiCall):
                 aqualityID.append(quality_map[quality])

         if iqualityID or aqualityID:
-            sickgear.QUALITY_DEFAULT = Quality.combineQualities(iqualityID, aqualityID)
+            sickgear.QUALITY_DEFAULT = Quality.combine_qualities(iqualityID, aqualityID)

         if self.status:
             # convert the string status to a int

@@ -3365,7 +3365,7 @@ class CMD_SickGearShowAddExisting(ApiCall):
                 aqualityID.append(quality_map[quality])

         if iqualityID or aqualityID:
-            newQuality = Quality.combineQualities(iqualityID, aqualityID)
+            newQuality = Quality.combine_qualities(iqualityID, aqualityID)

         sickgear.show_queue_scheduler.action.add_show(
             int(self.tvid), int(self.prodid), self.location,

@@ -3471,7 +3471,7 @@ class CMD_SickGearShowAddNew(ApiCall):
                 aqualityID.append(quality_map[quality])

         if iqualityID or aqualityID:
-            newQuality = Quality.combineQualities(iqualityID, aqualityID)
+            newQuality = Quality.combine_qualities(iqualityID, aqualityID)

         # use default status as a failsafe
         newStatus = sickgear.STATUS_DEFAULT

@@ -4144,7 +4144,7 @@ class CMD_SickGearShowSeasons(ApiCall):
                 [self.tvid, self.prodid])
             seasons = {}  # type: Dict[int, Dict]
             for cur_result in sql_result:
-                status, quality = Quality.splitCompositeStatus(int(cur_result["status"]))
+                status, quality = Quality.split_composite_status(int(cur_result["status"]))
                 cur_result["status"] = _get_status_Strings(status)
                 cur_result["quality"] = _get_quality_string(quality)
                 timezone, cur_result['timezone'] = network_timezones.get_network_timezone(show_obj.network,

@@ -4177,7 +4177,7 @@ class CMD_SickGearShowSeasons(ApiCall):
             for cur_result in sql_result:
                 curEpisode = int(cur_result["episode"])
                 del cur_result["episode"]
-                status, quality = Quality.splitCompositeStatus(int(cur_result["status"]))
+                status, quality = Quality.split_composite_status(int(cur_result["status"]))
                 cur_result["status"] = _get_status_Strings(status)
                 cur_result["quality"] = _get_quality_string(quality)
                 timezone, cur_result['timezone'] = network_timezones.get_network_timezone(show_obj.network,

@@ -4262,7 +4262,7 @@ class CMD_SickGearShowSetQuality(ApiCall):
                 aqualityID.append(quality_map[quality])

         if iqualityID or aqualityID:
-            newQuality = Quality.combineQualities(iqualityID, aqualityID)
+            newQuality = Quality.combine_qualities(iqualityID, aqualityID)
         show_obj.quality = newQuality

         show_obj.upgrade_once = self.upgradeonce

@@ -4326,7 +4326,7 @@ class CMD_SickGearShowStats(ApiCall):
         # add all the downloaded qualities
         episode_qualities_counts_download = {"total": 0}
         for statusCode in Quality.DOWNLOADED:
-            status, quality = Quality.splitCompositeStatus(statusCode)
+            status, quality = Quality.split_composite_status(statusCode)
             if quality in [Quality.NONE]:
                 continue
             episode_qualities_counts_download[statusCode] = 0

@@ -4334,7 +4334,7 @@ class CMD_SickGearShowStats(ApiCall):
         # add all snatched qualities
         episode_qualities_counts_snatch = {"total": 0}
         for statusCode in Quality.SNATCHED_ANY:
-            status, quality = Quality.splitCompositeStatus(statusCode)
+            status, quality = Quality.split_composite_status(statusCode)
             if quality in [Quality.NONE]:
                 continue
             episode_qualities_counts_snatch[statusCode] = 0

@@ -4345,7 +4345,7 @@ class CMD_SickGearShowStats(ApiCall):
             [self.prodid, self.tvid])
         # the main loop that goes through all episodes
         for cur_result in sql_result:
-            status, quality = Quality.splitCompositeStatus(int(cur_result["status"]))
+            status, quality = Quality.split_composite_status(int(cur_result["status"]))

             episode_status_counts_total["total"] += 1

@@ -4367,7 +4367,7 @@ class CMD_SickGearShowStats(ApiCall):
             if "total" == statusCode:
                 episodes_stats["downloaded"]["total"] = episode_qualities_counts_download[statusCode]
                 continue
-            status, quality = Quality.splitCompositeStatus(int(statusCode))
+            status, quality = Quality.split_composite_status(int(statusCode))
             statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
             episodes_stats["downloaded"][statusString] = episode_qualities_counts_download[statusCode]

@@ -4378,7 +4378,7 @@ class CMD_SickGearShowStats(ApiCall):
             if "total" == statusCode:
                 episodes_stats["snatched"]["total"] = episode_qualities_counts_snatch[statusCode]
                 continue
-            status, quality = Quality.splitCompositeStatus(int(statusCode))
+            status, quality = Quality.split_composite_status(int(statusCode))
             statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
             if Quality.qualityStrings[quality] in episodes_stats["snatched"]:
                 episodes_stats["snatched"][statusString] += episode_qualities_counts_snatch[statusCode]

@@ -4390,7 +4390,7 @@ class CMD_SickGearShowStats(ApiCall):
             if "total" == statusCode:
                 episodes_stats["total"] = episode_status_counts_total[statusCode]
                 continue
-            status, quality = Quality.splitCompositeStatus(int(statusCode))
+            status, quality = Quality.split_composite_status(int(statusCode))
             statusString = statusStrings.statusStrings[statusCode].lower().replace(" ", "_").replace("(", "").replace(
                 ")", "")
             episodes_stats[statusString] = episode_status_counts_total[statusCode]

@@ -4653,7 +4653,7 @@ class CMD_SickGearShowsForceUpdate(ApiCall):
                 or sickgear.show_update_scheduler.action.amActive:
             return _responds(RESULT_FAILURE, msg="show update already running.")

-        result = sickgear.show_update_scheduler.forceRun()
+        result = sickgear.show_update_scheduler.force_run()
         if result:
             return _responds(RESULT_SUCCESS, msg="daily show update started")
         return _responds(RESULT_FAILURE, msg="can't start show update currently")

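Note: the stats endpoint flattens quality labels into JSON-friendly keys by chaining `lower`/`replace`; a worked example with an invented label:

    label = 'HD 720p (WEB-DL)'  # hypothetical quality string
    key = label.lower().replace(' ', '_').replace('(', '').replace(')', '')
    assert 'hd_720p_web-dl' == key
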
@@ -19,6 +19,7 @@ from __future__ import with_statement, division

 # noinspection PyProtectedMember
 from mimetypes import MimeTypes
+from urllib.parse import urljoin

 import base64
 import copy

@@ -41,13 +42,21 @@ from json_helper import json_dumps, json_loads
 import sg_helpers
 from sg_helpers import remove_file, scantree, is_virtualenv

+from sg_futures import SgThreadPoolExecutor
+try:
+    from multiprocessing import cpu_count
+except ImportError:
+    # some platforms don't have multiprocessing
+    def cpu_count():
+        return None
+
 import sickgear
 from . import classes, clients, config, db, helpers, history, image_cache, logger, name_cache, naming, \
     network_timezones, notifiers, nzbget, processTV, sab, scene_exceptions, search_queue, subtitles, ui
 from .anime import AniGroupList, pull_anidb_groups, short_group_names
 from .browser import folders_at_path
 from .common import ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED, SNATCHED_ANY, UNAIRED, UNKNOWN, WANTED, \
-    SD, HD720p, HD1080p, UHD2160p, Overview, Quality, qualityPresetStrings, statusStrings
+    SD, HD720p, HD1080p, UHD2160p, Overview, Quality, qualityPresetStrings, statusStrings
 from .helpers import get_media_stats, has_image_ext, real_path, remove_article, remove_file_perm, starify
 from .indexermapper import MapStatus, map_indexers_to_show, save_mapping
 from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE, \

@@ -72,13 +81,9 @@ from unidecode import unidecode
 import dateutil.parser

 from tornado import gen, iostream
-# noinspection PyUnresolvedReferences
 from tornado.escape import utf8
 from tornado.web import RequestHandler, StaticFileHandler, authenticated
 from tornado.concurrent import run_on_executor
-# tornado.web.RequestHandler above is unresolved until...
-# 1) RouteHandler derives from RequestHandler instead of LegacyBaseHandler
-# 2) the following line is removed (plus the noinspection deleted)
-from ._legacy import LegacyBaseHandler

 from lib import subliminal
 from lib.cfscrape import CloudflareScraper

@@ -98,6 +103,7 @@ from six import binary_type, integer_types, iteritems, iterkeys, itervalues, mov
 if False:
     from typing import Any, AnyStr, Dict, List, Optional, Set, Tuple
+    from sickgear.providers.generic import TorrentProvider
     from tv import TVInfoShow


 # noinspection PyAbstractClass

@@ -187,7 +193,50 @@ class BaseStaticFileHandler(StaticFileHandler):
         self.set_header('X-Frame-Options', 'SAMEORIGIN')


-class RouteHandler(LegacyBaseHandler):
+class RouteHandler(RequestHandler):
+
+    executor = SgThreadPoolExecutor(thread_name_prefix='WEBSERVER', max_workers=min(32, (cpu_count() or 1) + 4))
+
+    def redirect(self, url, permanent=False, status=None):
+        """Send a redirect to the given (optionally relative) URL.
+
+        ----->>>>> NOTE: Removed self.finish <<<<<-----
+
+        If the ``status`` argument is specified, that value is used as the
+        HTTP status code; otherwise either 301 (permanent) or 302
+        (temporary) is chosen based on the ``permanent`` argument.
+        The default is 302 (temporary).
+        """
+        if not url.startswith(sickgear.WEB_ROOT):
+            url = sickgear.WEB_ROOT + url
+
+        # noinspection PyUnresolvedReferences
+        if self._headers_written:
+            raise Exception('Cannot redirect after headers have been written')
+        if status is None:
+            status = 301 if permanent else 302
+        else:
+            assert isinstance(status, int)
+            assert 300 <= status <= 399
+        self.set_status(status)
+        self.set_header('Location', urljoin(utf8(self.request.uri), utf8(url)))
+
+    def write_error(self, status_code, **kwargs):
+        body = ''
+        try:
+            if self.request.body:
+                body = '\nRequest body: %s' % decode_str(self.request.body)
+        except (BaseException, Exception):
+            pass
+        logger.log('Sent %s error response to a `%s` request for `%s` with headers:\n%s%s' %
+                   (status_code, self.request.method, self.request.path, self.request.headers, body), logger.WARNING)
+        # suppress traceback by removing 'exc_info' kwarg
+        if 'exc_info' in kwargs:
+            logger.log('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"]),
+                       logger.DEBUG)
+            del kwargs['exc_info']
+        return super(RouteHandler, self).write_error(status_code, **kwargs)
+
+    def data_received(self, *args):
+        pass

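Note: the new `RouteHandler` sizes its thread pool with `min(32, cpu_count() + 4)`, which mirrors the default max_workers formula of `concurrent.futures.ThreadPoolExecutor` in Python 3.8+. A minimal sketch of the run_on_executor pattern, with an invented handler and method (not the project's code):

    from concurrent.futures import ThreadPoolExecutor
    from multiprocessing import cpu_count

    from tornado.concurrent import run_on_executor
    from tornado.web import RequestHandler

    class SketchHandler(RequestHandler):
        # same sizing rule as the stdlib default in Python 3.8+
        executor = ThreadPoolExecutor(max_workers=min(32, (cpu_count() or 1) + 4))

        @run_on_executor
        def blocking_work(self):
            # runs on the pool, keeping the IOLoop free for other requests
            return 'done'

        async def get(self):
            self.write(await self.blocking_work())
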
@ -307,7 +356,7 @@ class BaseHandler(RouteHandler):
|
|||
elif 'fanart' == which[0:6]:
|
||||
image_file_name = [cache_obj.fanart_path(
|
||||
*tvid_prodid_obj.tuple +
|
||||
('%s' % (re.sub(r'.*?fanart_(\d+(?:\.\w{1,20})?\.\w{5,8}).*', r'\1.', which, 0, re.I)),))]
|
||||
('%s' % (re.sub(r'.*?fanart_(\d+(?:\.\w{1,20})?\.\w{5,8}).*', r'\1.', which, 0, re.I)),))]
|
||||
|
||||
for cur_name in image_file_name:
|
||||
if os.path.isfile(cur_name):
|
||||
|
@ -618,7 +667,7 @@ class RepoHandler(BaseStaticFileHandler):
|
|||
return self.index([('resource.language.en_gb/', 'English/')[self.kodi_is_legacy]])
|
||||
|
||||
def render_kodi_service_sickgear_watchedstate_updater_resources_language_english_index(self):
|
||||
return self.index([('strings.po', 'strings.xml')[self.kodi_is_legacy]])
|
||||
return self.index([('strings.po', 'strings.xml')[self.kodi_is_legacy]])
|
||||
|
||||
def repo_sickgear_details(self):
|
||||
return re.findall(r'(?si)addon\sid="(repository\.[^"]+)[^>]+version="([^"]+)',
|
||||
|
@ -875,9 +924,10 @@ class LogfileHandler(BaseHandler):
|
|||
super(LogfileHandler, self).__init__(application, request, **kwargs)
|
||||
self.lock = threading.Lock()
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
@authenticated
|
||||
@gen.coroutine
|
||||
def get(self, path, *args, **kwargs):
|
||||
def get(self, *args, **kwargs):
|
||||
logfile_name = logger.current_log_file()
|
||||
|
||||
try:
|
||||
|
@@ -1127,7 +1177,7 @@ class MainHandler(WebHandler):

        # make a dict out of the sql results
        sql_result = [dict(row) for row in sql_result
-                      if Quality.splitCompositeStatus(helpers.try_int(row['status']))[0] not in
+                      if Quality.split_composite_status(helpers.try_int(row['status']))[0] not in
                      SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED, SKIPPED]]

        # multi dimension sort

@@ -1178,8 +1228,8 @@ class MainHandler(WebHandler):
                pass
            if imdb_id:
                sql_result[index]['imdb_url'] = sickgear.indexers.indexer_config.tvinfo_config[
-                    sickgear.indexers.indexer_config.TVINFO_IMDB][
-                    'show_url'] % imdb_id
+                    sickgear.indexers.indexer_config.TVINFO_IMDB][
+                    'show_url'] % imdb_id
            else:
                sql_result[index]['imdb_url'] = ''

@@ -1282,7 +1332,7 @@ class MainHandler(WebHandler):

        now = datetime.datetime.now()
        events = [
-            ('recent', sickgear.recent_search_scheduler.timeLeft),
+            ('recent', sickgear.recent_search_scheduler.time_left),
            ('backlog', sickgear.backlog_search_scheduler.next_backlog_timeleft),
        ]

@@ -1996,7 +2046,7 @@ class Home(MainHandler):
            if not line.strip():
                continue
            if line.startswith(' '):
-                change_parts = re.findall(r'^[\W]+(.*)$', line)
+                change_parts = re.findall(r'^\W+(.*)$', line)
                change['text'] += change_parts and (' %s' % change_parts[0].strip()) or ''
            else:
                if change:

@@ -2008,11 +2058,11 @@ class Home(MainHandler):
                elif not max_rel:
                    break
                elif line.startswith('### '):
-                    rel_data = re.findall(r'(?im)^###\W*([^\s]+)\W\(([^)]+)\)', line)
+                    rel_data = re.findall(r'(?im)^###\W*(\S+)\W\(([^)]+)\)', line)
                    rel_data and output.append({'type': 'rel', 'ver': rel_data[0][0], 'date': rel_data[0][1]})
                    max_rel -= 1
                elif line.startswith('# '):
-                    max_data = re.findall(r'^#\W*([\d]+)\W*$', line)
+                    max_data = re.findall(r'^#\W*(\d+)\W*$', line)
                    max_rel = max_data and helpers.try_int(max_data[0], None) or 5
            if change:
                output.append(change)
@@ -2071,6 +2121,7 @@ class Home(MainHandler):
        else:
            self.redirect('/home/')

+    # noinspection PyUnusedLocal
    def season_render(self, tvid_prodid=None, season=None, **kwargs):

        response = {'success': False}

@@ -2309,7 +2360,7 @@ class Home(MainHandler):
            status_overview = show_obj.get_overview(row['status'])
            if status_overview:
                ep_counts[status_overview] += row['cnt']
-                if ARCHIVED == Quality.splitCompositeStatus(row['status'])[0]:
+                if ARCHIVED == Quality.split_composite_status(row['status'])[0]:
                    ep_counts['archived'].setdefault(row['season'], 0)
                    ep_counts['archived'][row['season']] = row['cnt'] + ep_counts['archived'].get(row['season'], 0)
            else:

@@ -2376,7 +2427,7 @@ class Home(MainHandler):

        t.clean_show_name = quote_plus(sickgear.indexermapper.clean_show_name(show_obj.name))

-        t.min_initial = Quality.get_quality_ui(min(Quality.splitQuality(show_obj.quality)[0]))
+        t.min_initial = Quality.get_quality_ui(min(Quality.split_quality(show_obj.quality)[0]))
        t.show_obj.exceptions = scene_exceptions.get_scene_exceptions(show_obj.tvid, show_obj.prodid)
        # noinspection PyUnresolvedReferences
        t.all_scene_exceptions = show_obj.exceptions  # normally Unresolved as not a class attribute, force set above

@@ -2422,7 +2473,7 @@ class Home(MainHandler):
                sorted_show_list[i].unique_name = '%s (%s)' % (sorted_show_list[i].name, start_year)
                dups[sorted_show_list[i].unique_name] = i

-        name_cache.buildNameCache()
+        name_cache.build_name_cache()

    @staticmethod
    def sorted_show_lists():

@@ -2577,12 +2628,12 @@ class Home(MainHandler):
        for k, v in iteritems(new_ids):
            if None is v.get('id') or None is v.get('status'):
                continue
-            if (show_obj.ids.get(k, {'id': 0}).get('id') != v.get('id') or
-                    (MapStatus.NO_AUTOMATIC_CHANGE == v.get('status') and
-                     MapStatus.NO_AUTOMATIC_CHANGE != show_obj.ids.get(
-                        k, {'status': MapStatus.NONE}).get('status')) or
-                    (MapStatus.NO_AUTOMATIC_CHANGE != v.get('status') and
-                     MapStatus.NO_AUTOMATIC_CHANGE == show_obj.ids.get(
+            if (show_obj.ids.get(k, {'id': 0}).get('id') != v.get('id')
+                    or (MapStatus.NO_AUTOMATIC_CHANGE == v.get('status')
+                        and MapStatus.NO_AUTOMATIC_CHANGE != show_obj.ids.get(
+                            k, {'status': MapStatus.NONE}).get('status'))
+                    or (MapStatus.NO_AUTOMATIC_CHANGE != v.get('status')
+                        and MapStatus.NO_AUTOMATIC_CHANGE == show_obj.ids.get(
                        k, {'status': MapStatus.NONE}).get('status'))):
                show_obj.ids[k]['id'] = (0, v['id'])[v['id'] >= 0]
                show_obj.ids[k]['status'] = (MapStatus.NOT_FOUND, v['status'])[v['id'] != 0]
@@ -2837,7 +2888,7 @@ class Home(MainHandler):

        errors = []
        with show_obj.lock:
-            show_obj.quality = Quality.combineQualities(list(map(int, any_qualities)), list(map(int, best_qualities)))
+            show_obj.quality = Quality.combine_qualities(list(map(int, any_qualities)), list(map(int, best_qualities)))
            show_obj.upgrade_once = upgrade_once

            # reversed for now

@@ -3032,6 +3083,7 @@ class Home(MainHandler):

        self.redirect('/home/view-show?tvid_prodid=%s' % show_obj.tvid_prodid)

+    # noinspection PyUnusedLocal
    def subtitle_show(self, tvid_prodid=None, force=0):

        if None is tvid_prodid:

@@ -3050,6 +3102,7 @@ class Home(MainHandler):

        self.redirect('/home/view-show?tvid_prodid=%s' % show_obj.tvid_prodid)

+    # noinspection PyUnusedLocal
    def update_mb(self, tvid_prodid=None, **kwargs):

        if notifiers.NotifierFactory().get('EMBY').update_library(

@@ -3115,7 +3168,7 @@ class Home(MainHandler):
                return json_dumps({'result': 'error'})
            return self._generic_message('Error', err_msg)

-        min_initial = min(Quality.splitQuality(show_obj.quality)[0])
+        min_initial = min(Quality.split_quality(show_obj.quality)[0])
        segments = {}
        if None is not eps:

@@ -3157,12 +3210,12 @@ class Home(MainHandler):

                    if ARCHIVED == status:
                        if ep_obj.status in Quality.DOWNLOADED or direct:
-                            ep_obj.status = Quality.compositeStatus(
-                                ARCHIVED, (Quality.splitCompositeStatus(ep_obj.status)[1], min_initial)[use_default])
+                            ep_obj.status = Quality.composite_status(
+                                ARCHIVED, (Quality.split_composite_status(ep_obj.status)[1], min_initial)[use_default])
                    elif DOWNLOADED == status:
                        if ep_obj.status in Quality.ARCHIVED:
-                            ep_obj.status = Quality.compositeStatus(
-                                DOWNLOADED, Quality.splitCompositeStatus(ep_obj.status)[1])
+                            ep_obj.status = Quality.composite_status(
+                                DOWNLOADED, Quality.split_composite_status(ep_obj.status)[1])
                    else:
                        ep_obj.status = status
@@ -3248,12 +3301,12 @@ class Home(MainHandler):
                for _cur_ep_obj in cur_ep_obj.related_ep_obj + [cur_ep_obj]:
                    if _cur_ep_obj in ep_obj_rename_list:
                        break
-                    ep_status, ep_qual = Quality.splitCompositeStatus(_cur_ep_obj.status)
+                    ep_status, ep_qual = Quality.split_composite_status(_cur_ep_obj.status)
                    if not ep_qual:
                        continue
                    ep_obj_rename_list.append(cur_ep_obj)
            else:
-                ep_status, ep_qual = Quality.splitCompositeStatus(cur_ep_obj.status)
+                ep_status, ep_qual = Quality.split_composite_status(cur_ep_obj.status)
                if not ep_qual:
                    continue
                ep_obj_rename_list.append(cur_ep_obj)

@@ -3330,7 +3383,7 @@ class Home(MainHandler):
        # retrieve the episode object and fail if we can't get one
        ep_obj = self._get_episode(tvid_prodid, season, episode)
        if not isinstance(ep_obj, str):
-            if UNKNOWN == Quality.splitCompositeStatus(ep_obj.status)[0]:
+            if UNKNOWN == Quality.split_composite_status(ep_obj.status)[0]:
                ep_obj.status = SKIPPED

        # make a queue item for the TVEpisode and put it on the queue

@@ -3400,7 +3453,7 @@ class Home(MainHandler):
                seen_eps.add(uniq_sxe)

            for snatched in filter(lambda s: ((s.tvid, s.prodid, s.season, s.episode) not in seen_eps),
-                                   item.snatched_eps):
+                                   item.snatched_eps):
                ep_obj = getattr(snatched, 'ep_obj', None)
                if not ep_obj:
                    continue

@@ -3435,9 +3488,9 @@ class Home(MainHandler):
        """
        # Find the quality class for the episode
        quality_class = Quality.qualityStrings[Quality.UNKNOWN]
-        ep_status, ep_quality = Quality.splitCompositeStatus(ep_type.status)
+        ep_status, ep_quality = Quality.split_composite_status(ep_type.status)
        for x in (SD, HD720p, HD1080p, UHD2160p):
-            if ep_quality in Quality.splitQuality(x)[0]:
+            if ep_quality in Quality.split_quality(x)[0]:
                quality_class = qualityPresetStrings[x]
                break

@@ -3466,7 +3519,7 @@ class Home(MainHandler):
        if isinstance(ep_obj, str):
            return json_dumps({'result': 'failure'})

-        # try do download subtitles for that episode
+        # try to download subtitles for that episode
        try:
            previous_subtitles = set([subliminal.language.Language(x) for x in ep_obj.subtitles])
            ep_obj.subtitles = set([x.language for x in next(itervalues(ep_obj.download_subtitles()))])
@@ -3880,7 +3933,7 @@ class HomeProcessMedia(Home):
                regexp = re.compile(r'(?i)<br[\s/]+>', flags=re.UNICODE)
                result = regexp.sub('\n', result)
                if None is not quiet and 1 == int(quiet):
-                    regexp = re.compile(u'(?i)<a[^>]+>([^<]+)<[/]a>', flags=re.UNICODE)
+                    regexp = re.compile(u'(?i)<a[^>]+>([^<]+)</a>', flags=re.UNICODE)
                    return u'%s' % regexp.sub(r'\1', result)
                return self._generic_message('Postprocessing results', u'<pre>%s</pre>' % result)

@@ -3985,7 +4038,7 @@ class AddShows(Home):
            r'(?P<tmdb_full>[^ ]+themoviedb\.org/tv/(?P<tmdb>\d+)[^ ]*)|'
            r'(?P<trakt_full>[^ ]+trakt\.tv/shows/(?P<trakt>[^ /]+)[^ ]*)|'
            r'(?P<tvdb_full>[^ ]+thetvdb\.com/series/(?P<tvdb>[^ /]+)[^ ]*)|'
-            r'(?P<tvdb_id_full>[^ ]+thetvdb\.com/[^\d]+(?P<tvdb_id>[^ /]+)[^ ]*)|'
+            r'(?P<tvdb_id_full>[^ ]+thetvdb\.com/\D+(?P<tvdb_id>[^ /]+)[^ ]*)|'
            r'(?P<tvmaze_full>[^ ]+tvmaze\.com/shows/(?P<tvmaze>\d+)/?[^ ]*)', search_term)
        if id_check:
            for cur_match in id_check:

@@ -4035,7 +4088,7 @@ class AddShows(Home):
            t = sickgear.TVInfoAPI(cur_tvid).setup(**tvinfo_config)
            results.setdefault(cur_tvid, {})
            try:
-                for cur_result in t.search_show(list(used_search_term), ids=ids_search_used):
+                for cur_result in t.search_show(list(used_search_term), ids=ids_search_used):  # type: TVInfoShow
                    if TVINFO_TRAKT == cur_tvid and not cur_result['ids'].tvdb:
                        continue
                    tv_src_id = int(cur_result['id'])

@@ -4679,7 +4732,7 @@ class AddShows(Home):

    def parse_imdb_html(self, html, filtered, kwargs):

-        img_size = re.compile(r'(?im)(V1[^XY]+([XY]))(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)(.*?)$')
+        img_size = re.compile(r'(?im)(V1[^XY]+([XY]))(\d+)(\D+)(\d+)(\D+)(\d+)(\D+)(\d+)(\D+)(\d+)(.*?)$')

        with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
            show_list = soup.select('.lister-list')
@@ -5200,7 +5253,7 @@ class AddShows(Home):
                channel_tag_copy = copy.copy(channel_tag)
                if channel_tag_copy:
                    network = channel_tag_copy.a.extract().get_text(strip=True)
-                    date_info = re.sub(r'^[^\d]+', '', channel_tag_copy.get_text(strip=True))
+                    date_info = re.sub(r'^\D+', '', channel_tag_copy.get_text(strip=True))
                    if date_info:
                        dt = dateutil.parser.parse((date_info, '%s.01.01' % date_info)[4 == len(date_info)])

@@ -5209,7 +5262,7 @@ class AddShows(Home):
                       and 'printed' in ' '.join(t.get('class', ''))]
                if len(tag):
                    age_args = {}
-                    future = re.sub(r'[^\d]+(.*)', r'\1', tag[0].get_text(strip=True))
+                    future = re.sub(r'\D+(.*)', r'\1', tag[0].get_text(strip=True))
                    for (dim, rcx) in rc:
                        value = helpers.try_int(rcx.sub(r'\1', future), None)
                        if value:

@@ -5237,7 +5290,7 @@ class AddShows(Home):

                genres = row.find(class_='genre')
                if genres:
-                    genres = re.sub(r',([^\s])', r', \1', genres.get_text(strip=True))
+                    genres = re.sub(r',(\S)', r', \1', genres.get_text(strip=True))
                overview = row.find(class_='summary')
                if overview:
                    overview = overview.get_text(strip=True)

@@ -6031,7 +6084,7 @@ class AddShows(Home):
            any_qualities = [any_qualities]
        if type(best_qualities) != list:
            best_qualities = [best_qualities]
-        new_quality = Quality.combineQualities(list(map(int, any_qualities)), list(map(int, best_qualities)))
+        new_quality = Quality.combine_qualities(list(map(int, any_qualities)), list(map(int, best_qualities)))
        upgrade_once = config.checkbox_to_value(upgrade_once)

        wanted_begin = config.minimax(wanted_begin, 0, -1, 10)
@@ -6226,7 +6279,7 @@ class Manage(MainHandler):
            if cur_season not in result:
                result[cur_season] = {}

-            cur_quality = Quality.splitCompositeStatus(int(cur_result['status']))[1]
+            cur_quality = Quality.split_composite_status(int(cur_result['status']))[1]
            result[cur_season][cur_episode] = {'name': cur_result['name'],
                                               'airdateNever': 1000 > int(cur_result['airdate']),
                                               'qualityCss': Quality.get_quality_css(cur_quality),

@@ -6246,9 +6299,9 @@ class Manage(MainHandler):
        if event_sql_result:
            for cur_result_event in event_sql_result:
                if None is d_status and cur_result_event['action'] in Quality.DOWNLOADED:
-                    d_status, d_qual = Quality.splitCompositeStatus(cur_result_event['action'])
+                    d_status, d_qual = Quality.split_composite_status(cur_result_event['action'])
                if None is s_status and cur_result_event['action'] in Quality.SNATCHED_ANY:
-                    s_status, s_quality = Quality.splitCompositeStatus(cur_result_event['action'])
+                    s_status, s_quality = Quality.split_composite_status(cur_result_event['action'])
                    aged = ((datetime.datetime.now() -
                             datetime.datetime.strptime(str(cur_result_event['date']),
                                                        sickgear.history.dateFormat))

@@ -6289,11 +6342,11 @@ class Manage(MainHandler):
        if Quality.NONE == cur_quality:
            return undo_from_history, change_to, status

-        cur_status = Quality.splitCompositeStatus(int(cur_status))[0]
+        cur_status = Quality.split_composite_status(int(cur_status))[0]
        if any([location]):
            undo_from_history = True
            change_to = statusStrings[DOWNLOADED]
-            status = [Quality.compositeStatus(DOWNLOADED, d_qual or cur_quality)]
+            status = [Quality.composite_status(DOWNLOADED, d_qual or cur_quality)]
        elif cur_status in Quality.SNATCHED_ANY + [IGNORED, SKIPPED, WANTED]:
            if None is d_qual:
                if cur_status not in [IGNORED, SKIPPED]:

@@ -6305,7 +6358,7 @@ class Manage(MainHandler):
                    or sickgear.SKIP_REMOVED_FILES in [ARCHIVED, IGNORED, SKIPPED]:
                undo_from_history = True
                change_to = '%s %s' % (statusStrings[ARCHIVED], Quality.qualityStrings[d_qual])
-                status = [Quality.compositeStatus(ARCHIVED, d_qual)]
+                status = [Quality.composite_status(ARCHIVED, d_qual)]
            elif sickgear.SKIP_REMOVED_FILES in [IGNORED, SKIPPED] \
                    and cur_status not in [IGNORED, SKIPPED]:
                change_to = statusStrings[statusStrings[sickgear.SKIP_REMOVED_FILES]]

@@ -6893,7 +6946,7 @@ class Manage(MainHandler):
        new_subtitles = 'on' if new_subtitles else 'off'

        if 'keep' == quality_preset:
-            any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
+            any_qualities, best_qualities = Quality.split_quality(show_obj.quality)
        elif int(quality_preset):
            best_qualities = []
@@ -7110,7 +7163,7 @@ class ManageSearch(Manage):
        def retry_provider(provider=None):
            if not provider:
                return
-            prov = [p for p in sickgear.providerList + sickgear.newznabProviderList if p.get_id() == provider]
+            prov = [p for p in sickgear.provider_list + sickgear.newznab_providers if p.get_id() == provider]
            if not prov:
                return
            prov[0].retry_next()

@@ -7131,7 +7184,7 @@ class ManageSearch(Manage):

        # force it to run the next time it looks
        if not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress():
-            result = sickgear.recent_search_scheduler.forceRun()
+            result = sickgear.recent_search_scheduler.force_run()
            if result:
                logger.log(u'Recent search forced')
                ui.notifications.message('Recent search started')

@@ -7142,7 +7195,7 @@ class ManageSearch(Manage):
    def force_find_propers(self):

        # force it to run the next time it looks
-        result = sickgear.proper_finder_scheduler.forceRun()
+        result = sickgear.proper_finder_scheduler.force_run()
        if result:
            logger.log(u'Find propers search forced')
            ui.notifications.message('Find propers search started')

@@ -7166,7 +7219,7 @@ class ShowTasks(Manage):
        t = PageTemplate(web_handler=self, file='manage_showProcesses.tmpl')
        t.queue_length = sickgear.show_queue_scheduler.action.queue_length()
        t.people_queue = sickgear.people_queue_scheduler.action.queue_data()
-        t.next_run = sickgear.show_update_scheduler.lastRun.replace(
+        t.next_run = sickgear.show_update_scheduler.last_run.replace(
            hour=sickgear.show_update_scheduler.start_time.hour)
        t.show_update_running = sickgear.show_queue_scheduler.action.is_show_update_running() \
            or sickgear.show_update_scheduler.action.amActive

@@ -7252,7 +7305,7 @@ class ShowTasks(Manage):

    def force_show_update(self):

-        result = sickgear.show_update_scheduler.forceRun()
+        result = sickgear.show_update_scheduler.force_run()
        if result:
            logger.log(u'Show Update forced')
            ui.notifications.message('Forced Show Update started')
@@ -7412,7 +7465,7 @@ class History(MainHandler):
                    r['status'] = r['status_w']
                    r['file_size'] = r['file_size_w']

-                r['status'], r['quality'] = Quality.splitCompositeStatus(helpers.try_int(r['status']))
+                r['status'], r['quality'] = Quality.split_composite_status(helpers.try_int(r['status']))
                r['season'], r['episode'] = '%02i' % r['season'], '%02i' % r['episode']
                if r['tvep_id'] not in mru_count:
                    # depends on SELECT ORDER BY date_watched DESC to determine mru_count

@@ -7428,9 +7481,9 @@ class History(MainHandler):

        elif 'stats' in sickgear.HISTORY_LAYOUT:

-            prov_list = [p.name for p in (sickgear.providerList
-                                          + sickgear.newznabProviderList
-                                          + sickgear.torrentRssProviderList)]
+            prov_list = [p.name for p in (sickgear.provider_list
+                                          + sickgear.newznab_providers
+                                          + sickgear.torrent_rss_providers)]
            # noinspection SqlResolve
            sql = 'SELECT COUNT(1) AS count,' \
                  ' MIN(DISTINCT date) AS earliest,' \

@@ -7462,7 +7515,7 @@ class History(MainHandler):
                         prov_id=p.get_id(),  # 2020.03.17 legacy var, remove at future date
                         fails=p.fails.fails_sorted, next_try=p.get_next_try_time,
                         has_limit=getattr(p, 'has_limit', False), tmr_limit_time=p.tmr_limit_time)
-                     for p in sickgear.providerList + sickgear.newznabProviderList]))
+                     for p in sickgear.provider_list + sickgear.newznab_providers]))

            t.provider_fail_cnt = len([p for p in t.provider_fail_stats if len(p['fails'])])
            t.provider_fails = t.provider_fail_cnt  # 2020.03.17 legacy var, remove at future date

@@ -7841,7 +7894,7 @@ class History(MainHandler):
                show_obj = helpers.find_show_by_id(tvid_prodid_dict)
                ep_obj = show_obj.get_episode(cur_result['season'], cur_result['episode'])
                for n in filter(lambda x: x.name.lower() in ('emby', 'kodi', 'plex'),
-                                notifiers.NotifierFactory().get_enabled()):
+                                notifiers.NotifierFactory().get_enabled()):
                    if 'PLEX' == n.name:
                        if updating:
                            continue
@@ -8011,7 +8064,7 @@ class ConfigGeneral(Config):
        return json_dumps(dict(text='%s\n\n' % ui_output))

    @staticmethod
-    def generate_key():
+    def generate_key(*args, **kwargs):
        """ Return a new randomized API_KEY
        """
        # Create some values to seed md5

@@ -8019,8 +8072,10 @@ class ConfigGeneral(Config):

        result = hashlib.new('md5', decode_bytes(seed)).hexdigest()

-        # Return a hex digest of the md5, eg 49f68a5c8493ec2c0bf489821c21fc3b
-        logger.log(u'New API generated')
+        # Return a hex digest of the md5, e.g. 49f68a5c8493ec2c0bf489821c21fc3b
+        app_name = kwargs.get('app_name')
+        app_name = '' if not app_name else ' for [%s]' % app_name
+        logger.log(u'New API generated%s' % app_name)

        return result

@@ -8053,8 +8108,8 @@ class ConfigGeneral(Config):
        any_qualities = ([], any_qualities.split(','))[any(any_qualities)]
        best_qualities = ([], best_qualities.split(','))[any(best_qualities)]

-        sickgear.QUALITY_DEFAULT = int(Quality.combineQualities(list(map(int, any_qualities)),
-                                                                list(map(int, best_qualities))))
+        sickgear.QUALITY_DEFAULT = int(Quality.combine_qualities(list(map(int, any_qualities)),
+                                                                 list(map(int, best_qualities))))
        sickgear.WANTED_BEGIN_DEFAULT = config.minimax(default_wanted_begin, 0, -1, 10)
        sickgear.WANTED_LATEST_DEFAULT = config.minimax(default_wanted_latest, 0, -1, 10)
        sickgear.SHOW_TAG_DEFAULT = default_tag

@@ -8067,33 +8122,6 @@ class ConfigGeneral(Config):

        sickgear.save_config()

-    @staticmethod
-    def generateKey(*args, **kwargs):
-        """ Return a new randomized API_KEY
-        """
-
-        try:
-            from hashlib import md5
-        except ImportError:
-            # noinspection PyUnresolvedReferences,PyCompatibility
-            from md5 import md5
-
-        # Create some values to seed md5
-        t = str(time.time())
-        r = str(random.random())
-
-        # Create the md5 instance and give it the current time
-        m = md5(decode_bytes(t))
-
-        # Update the md5 instance with the random variable
-        m.update(decode_bytes(r))
-
-        # Return a hex digest of the md5, eg 49f68a5c8493ec2c0bf489821c21fc3b
-        app_name = kwargs.get('app_name')
-        app_name = '' if not app_name else ' for [%s]' % app_name
-        logger.log(u'New apikey generated%s' % app_name)
-        return m.hexdigest()
-
    def create_apikey(self, app_name):
        result = dict()
        if not app_name:
@@ -8101,7 +8129,7 @@ class ConfigGeneral(Config):
        elif app_name in [k[0] for k in sickgear.API_KEYS if k[0]]:
            result['result'] = 'Failed: name is not unique'
        else:
-            api_key = self.generateKey(app_name=app_name)
+            api_key = self.generate_key(app_name=app_name)
            if api_key in [k[1] for k in sickgear.API_KEYS if k[0]]:
                result['result'] = 'Failed: apikey already exists, try again'
            else:

@@ -8199,7 +8227,7 @@ class ConfigGeneral(Config):
        sickgear.FANART_LIMIT = config.minimax(fanart_limit, 3, 0, 500)
        sickgear.SHOWLIST_TAGVIEW = showlist_tagview

-        # 'Show List' is the must have default fallback. Tags in use that are removed from config ui are restored,
+        # 'Show List' is the must-have default fallback. Tags in use that are removed from config ui are restored,
        # not deleted. Deduped list order preservation is key to feature function.
        my_db = db.DBConnection()
        sql_result = my_db.select('SELECT DISTINCT tag FROM tv_shows')

@@ -8211,7 +8239,7 @@ class ConfigGeneral(Config):
            results += [u'An attempt was prevented to remove a show list group name still in use']
        dedupe = {}
        sickgear.SHOW_TAGS = [dedupe.setdefault(item, item) for item in (cleanser + new_names + [u'Show List'])
-                              if item not in dedupe]
+                              if item not in dedupe]

        sickgear.HOME_SEARCH_FOCUS = config.checkbox_to_value(home_search_focus)
        sickgear.USE_IMDB_INFO = config.checkbox_to_value(use_imdb_info)

@@ -8255,7 +8283,7 @@ class ConfigGeneral(Config):
        sickgear.HANDLE_REVERSE_PROXY = config.checkbox_to_value(handle_reverse_proxy)
        sickgear.SEND_SECURITY_HEADERS = config.checkbox_to_value(send_security_headers)
        hosts = ','.join(filter(lambda name: not helpers.re_valid_hostname(with_allowed=False).match(name),
-                                config.clean_hosts(allowed_hosts).split(',')))
+                                config.clean_hosts(allowed_hosts).split(',')))
        if not hosts or self.request.host_name in hosts:
            sickgear.ALLOWED_HOSTS = hosts
        sickgear.ALLOW_ANYIP = config.checkbox_to_value(allow_anyip)
@@ -8395,9 +8423,9 @@ class ConfigSearch(Config):
        sickgear.USENET_RETENTION = config.to_int(usenet_retention, default=500)

        sickgear.IGNORE_WORDS, sickgear.IGNORE_WORDS_REGEX = helpers.split_word_str(ignore_words
-                                                                                   if ignore_words else '')
+                                                                                   if ignore_words else '')
        sickgear.REQUIRE_WORDS, sickgear.REQUIRE_WORDS_REGEX = helpers.split_word_str(require_words
-                                                                                     if require_words else '')
+                                                                                     if require_words else '')

        clean_ignore_require_words()

@@ -8406,7 +8434,7 @@ class ConfigSearch(Config):

        sickgear.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired))
        sickgear.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only,
-                                                                           value_off=1, value_on=0))
+                                                                           value_off=1, value_on=0))

        sickgear.FLARESOLVERR_HOST = config.clean_url(flaresolverr_host)
        sg_helpers.FLARESOLVERR_HOST = sickgear.FLARESOLVERR_HOST

@@ -8668,9 +8696,9 @@ class ConfigProviders(Config):
            return json_dumps({'error': 'No Provider Name or url specified'})

        provider_dict = dict(zip([sickgear.providers.generic_provider_name(x.get_id())
-                                  for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+                                  for x in sickgear.newznab_providers], sickgear.newznab_providers))
        provider_url_dict = dict(zip([sickgear.providers.generic_provider_url(x.url)
-                                      for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+                                      for x in sickgear.newznab_providers], sickgear.newznab_providers))

        temp_provider = newznab.NewznabProvider(name, config.clean_url(url))

@@ -8694,12 +8722,12 @@ class ConfigProviders(Config):
                error = '\nNo provider %s specified' % error
            return json_dumps({'success': False, 'error': error})

-        if name in [n.name for n in sickgear.newznabProviderList if n.url == url]:
-            provider = [n for n in sickgear.newznabProviderList if n.name == name][0]
+        if name in [n.name for n in sickgear.newznab_providers if n.url == url]:
+            provider = [n for n in sickgear.newznab_providers if n.name == name][0]
            tv_categories = provider.clean_newznab_categories(provider.all_cats)
            state = provider.is_enabled()
        else:
-            providers = dict(zip([x.get_id() for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+            providers = dict(zip([x.get_id() for x in sickgear.newznab_providers], sickgear.newznab_providers))
            temp_provider = newznab.NewznabProvider(name, url, key)
            if None is not key and starify(key, True):
                temp_provider.key = providers[temp_provider.get_id()].key

@@ -8715,7 +8743,7 @@ class ConfigProviders(Config):
            return json_dumps({'error': 'Invalid name specified'})

        provider_dict = dict(
-            zip([x.get_id() for x in sickgear.torrentRssProviderList], sickgear.torrentRssProviderList))
+            zip([x.get_id() for x in sickgear.torrent_rss_providers], sickgear.torrent_rss_providers))

        temp_provider = rsstorrent.TorrentRssProvider(name, url, cookies)
@@ -8730,7 +8758,7 @@ class ConfigProviders(Config):

    @staticmethod
    def check_providers_ping():
-        for p in sickgear.providers.sortedProviderList():
+        for p in sickgear.providers.sorted_sources():
            if getattr(p, 'ping_iv', None):
                if p.is_active() and (p.get_id() not in sickgear.provider_ping_thread_pool
                                      or not sickgear.provider_ping_thread_pool[p.get_id()].is_alive()):

@@ -8748,7 +8776,7 @@ class ConfigProviders(Config):
                    pass

        # stop removed providers
-        prov = [n.get_id() for n in sickgear.providers.sortedProviderList()]
+        prov = [n.get_id() for n in sickgear.providers.sorted_sources()]
        for p in [x for x in sickgear.provider_ping_thread_pool if x not in prov]:
            sickgear.provider_ping_thread_pool[p].stop = True
            try:

@@ -8764,7 +8792,7 @@ class ConfigProviders(Config):
        provider_list = []

        # add all the newznab info we have into our list
-        newznab_sources = dict(zip([x.get_id() for x in sickgear.newznabProviderList], sickgear.newznabProviderList))
+        newznab_sources = dict(zip([x.get_id() for x in sickgear.newznab_providers], sickgear.newznab_providers))
        active_ids = []
        reload_page = False
        if newznab_string:

@@ -8821,18 +8849,18 @@ class ConfigProviders(Config):
                    new_provider.enabled = True
                    _ = new_provider.caps  # when adding a custom, trigger server_type update
                    new_provider.enabled = False
-                    sickgear.newznabProviderList.append(new_provider)
+                    sickgear.newznab_providers.append(new_provider)

                active_ids.append(cur_id)

        # delete anything that is missing
        if sickgear.USE_NZBS:
-            for source in [x for x in sickgear.newznabProviderList if x.get_id() not in active_ids]:
-                sickgear.newznabProviderList.remove(source)
+            for source in [x for x in sickgear.newznab_providers if x.get_id() not in active_ids]:
+                sickgear.newznab_providers.remove(source)

        # add all the torrent RSS info we have into our list
-        torrent_rss_sources = dict(zip([x.get_id() for x in sickgear.torrentRssProviderList],
-                                       sickgear.torrentRssProviderList))
+        torrent_rss_sources = dict(zip([x.get_id() for x in sickgear.torrent_rss_providers],
+                                       sickgear.torrent_rss_providers))
        active_ids = []
        if torrentrss_string:
            for curTorrentRssProviderStr in torrentrss_string.split('!!!'):
@@ -8868,19 +8896,19 @@ class ConfigProviders(Config):
                        if attr_check in kwargs:
                            setattr(torrss_src, attr, str(kwargs.get(attr_check) or '').strip())
                else:
-                    sickgear.torrentRssProviderList.append(new_provider)
+                    sickgear.torrent_rss_providers.append(new_provider)

                active_ids.append(cur_id)

        # delete anything that is missing
        if sickgear.USE_TORRENTS:
-            for source in [x for x in sickgear.torrentRssProviderList if x.get_id() not in active_ids]:
-                sickgear.torrentRssProviderList.remove(source)
+            for source in [x for x in sickgear.torrent_rss_providers if x.get_id() not in active_ids]:
+                sickgear.torrent_rss_providers.remove(source)

        # enable/disable states of source providers
        provider_str_list = provider_order.split()
-        sources = dict(zip([x.get_id() for x in sickgear.providers.sortedProviderList()],
-                           sickgear.providers.sortedProviderList()))
+        sources = dict(zip([x.get_id() for x in sickgear.providers.sorted_sources()],
+                           sickgear.providers.sorted_sources()))
        for cur_src_str in provider_str_list:
            src_name, src_enabled = cur_src_str.split(':')

@@ -8904,7 +8932,7 @@ class ConfigProviders(Config):
                torrent_rss_sources[src_name].enabled = src_enabled

        # update torrent source settings
-        for torrent_src in [src for src in sickgear.providers.sortedProviderList()
+        for torrent_src in [src for src in sickgear.providers.sorted_sources()
                            if sickgear.GenericProvider.TORRENT == src.providerType]:  # type: TorrentProvider
            src_id_prefix = torrent_src.get_id() + '_'

@@ -8951,7 +8979,7 @@ class ConfigProviders(Config):
                setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())

        # update nzb source settings
-        for nzb_src in [src for src in sickgear.providers.sortedProviderList() if
+        for nzb_src in [src for src in sickgear.providers.sorted_sources() if
                        sickgear.GenericProvider.NZB == src.providerType]:
            src_id_prefix = nzb_src.get_id() + '_'

@@ -8979,7 +9007,7 @@ class ConfigProviders(Config):
            if hasattr(nzb_src, attr):
                setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())

-        sickgear.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickgear.newznabProviderList])
+        sickgear.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickgear.newznab_providers])
        sickgear.PROVIDER_ORDER = provider_list

        helpers.clear_unused_providers()
@@ -9487,17 +9515,11 @@ class EventLogs(MainHandler):
class WebFileBrowser(MainHandler):

    def index(self, path='', include_files=False, **kwargs):
-        """ prevent issues with requests using legacy params """
-        include_files = include_files or kwargs.get('includeFiles') or False
+        """ /legacy """

        self.set_header('Content-Type', 'application/json')
        return json_dumps(folders_at_path(path, True, bool(int(include_files))))

    def complete(self, term, include_files=0, **kwargs):
-        """ prevent issues with requests using legacy params """
-        include_files = include_files or kwargs.get('includeFiles') or False
+        """ /legacy """

        self.set_header('Content-Type', 'application/json')
        return json_dumps([entry['path'] for entry in folders_at_path(

@@ -9700,7 +9722,7 @@ class CachedImages(MainHandler):
        :param tvid_prodid:
        :param thumb: return thumb or normal as fallback
        :param pid: optional person_id
-        :param prefer_person: prefer person image if person_id is set and character has more then 1 person assigned
+        :param prefer_person: prefer person image if person_id is set and character has more than 1 person assigned
        """
        _ = kwargs.get('oid')  # suppress pyc non used var highlight, oid (original id) is a visual ui key
        show_obj = tvid_prodid and helpers.find_show_by_id(tvid_prodid)

@@ -1,5 +1,5 @@
 import os
-from sys import exc_info, platform
+from sys import exc_info
 import threading

 from tornado.ioloop import IOLoop

@@ -8,13 +8,9 @@ from tornado.routing import AnyMatches, Rule
 from tornado.web import Application, _ApplicationRouter

 from . import logger, webapi, webserve
-from ._legacy import LegacyConfigPostProcessing, LegacyHomeAddShows, \
-    LegacyManageManageSearches, LegacyManageShowProcesses, LegacyErrorLogs
 from .helpers import create_https_certificates, re_valid_hostname
 import sickgear

-from _23 import PY38
-
 # noinspection PyUnreachableCode
 if False:
     # noinspection PyUnresolvedReferences

@@ -218,22 +214,6 @@ class WebServer(threading.Thread):
            (r'%s/api/builder(/?)(.*)' % self.options['web_root'], webserve.ApiBuilder),
            (r'%s/api(/?.*)' % self.options['web_root'], webapi.Api),
            # ----------------------------------------------------------------------------------------------------------
-            # legacy deprecated Aug 2019
-            (r'%s/home/addShows/?$' % self.options['web_root'], LegacyHomeAddShows),
-            (r'%s/manage/manageSearches/?$' % self.options['web_root'], LegacyManageManageSearches),
-            (r'%s/manage/showProcesses/?$' % self.options['web_root'], LegacyManageShowProcesses),
-            (r'%s/config/postProcessing/?$' % self.options['web_root'], LegacyConfigPostProcessing),
-            (r'%s/errorlogs/?$' % self.options['web_root'], LegacyErrorLogs),
-            (r'%s/home/is_alive(/?.*)' % self.options['web_root'], webserve.IsAliveHandler),
-            (r'%s/home/addShows(/?.*)' % self.options['web_root'], webserve.AddShows),
-            (r'%s/manage/manageSearches(/?.*)' % self.options['web_root'], webserve.ManageSearch),
-            (r'%s/manage/showProcesses(/?.*)' % self.options['web_root'], webserve.ShowTasks),
-            (r'%s/config/postProcessing(/?.*)' % self.options['web_root'], webserve.ConfigMediaProcess),
-            (r'%s/errorlogs(/?.*)' % self.options['web_root'], webserve.EventLogs),
-            # ----------------------------------------------------------------------------------------------------------
-            # legacy deprecated Aug 2019 - never remove as used in external scripts
-            (r'%s/home/postprocess(/?.*)' % self.options['web_root'], webserve.HomeProcessMedia),
-            (r'%s(/?update_watched_state_kodi/?)' % self.options['web_root'], webserve.NoXSRFHandler),
            # regular catchall routes - keep here at the bottom
            (r'%s/home(/?.*)' % self.options['web_root'], webserve.Home),
            (r'%s/manage/(/?.*)' % self.options['web_root'], webserve.Manage),
@@ -255,9 +235,6 @@ class WebServer(threading.Thread):

        # python 3 needs to start event loop first
        import asyncio
-        if 'win32' == platform and PY38:
-            # noinspection PyUnresolvedReferences
-            asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
        asyncio.set_event_loop(asyncio.new_event_loop())
+        from tornado.platform.asyncio import AnyThreadEventLoopPolicy
+        asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())

@@ -135,7 +135,7 @@ class QualityTests(unittest.TestCase):

    def check_quality_names(self, quality, cases):
        for fn in cases:
-            second = common.Quality.nameQuality(fn)
+            second = common.Quality.name_quality(fn)
            self.assertEqual(quality, second, msg='fail [%s] != [%s] for case: %s' %
                             (Quality.qualityStrings[quality], Quality.qualityStrings[second], fn))

@@ -148,7 +148,7 @@ class QualityTests(unittest.TestCase):

    def check_wantedquality_list(self, cases):
        for show_quality, result in cases:
-            sq = common.Quality.combineQualities(*show_quality)
+            sq = common.Quality.combine_qualities(*show_quality)
            wd = common.WantedQualities()
            _ = wd.get_wantedlist(sq, False, common.Quality.NONE, common.UNAIRED, manual=True)
            for w, v in iteritems(wd):

@@ -158,7 +158,7 @@ class QualityTests(unittest.TestCase):

    def check_wantedquality_get_wantedlist(self, cases):
        for show_quality, result in cases:
-            sq = common.Quality.combineQualities(*show_quality)
+            sq = common.Quality.combine_qualities(*show_quality)
            wd = common.WantedQualities()
            for case, wlist in result:
                ka = {'qualities': sq}

@@ -169,7 +169,7 @@ class QualityTests(unittest.TestCase):
    def check_sceneQuality(self, cases):
        msg = 'Test case: "%s", actual: [%s] != expected: [%s]'
        for show_name, result in cases:
-            sq = common.Quality.sceneQuality(show_name[0], show_name[1])
+            sq = common.Quality.scene_quality(show_name[0], show_name[1])
            self.assertEqual(result, sq, msg=msg % (show_name[0], Quality.qualityStrings[sq],
                                                    Quality.qualityStrings[result]))

@@ -177,8 +177,8 @@ class QualityTests(unittest.TestCase):

    def test_SDTV(self):

-        self.assertEqual(common.Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV),
-                         common.Quality.statusFromName('Test.Show.S01E02-GROUP.mkv'))
+        self.assertEqual(common.Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV),
+                         common.Quality.status_from_name('Test.Show.S01E02-GROUP.mkv'))

    def test_qualites(self):
        self.longMessage = True

@@ -65,7 +65,7 @@ class HelpersTests(unittest.TestCase):
            ((WANTED, Quality.NONE), True),
        ]
        for c, b in test_cases:
-            self.assertEqual(helpers.should_delete_episode(Quality.compositeStatus(*c)), b)
+            self.assertEqual(helpers.should_delete_episode(Quality.composite_status(*c)), b)

    def test_encrypt(self):
        helpers.unique_key1 = '0x12d48f154876c16164a1646'

@@ -508,8 +508,8 @@ class MultiSceneNumbering(test.SickbeardTestDBCase):
            )
            my_db = db.DBConnection()
            my_db.mass_action(c_l)
-            name_cache.addNameToCache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
-                                      prodid=e_t['show_obj']['prodid'])
+            name_cache.add_name_to_cache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
+                                         prodid=e_t['show_obj']['prodid'])
            for _t in e_t['tests']:
                try:
                    res = parser.NameParser(True, convert=True).parse(_t['parse_name'])

@@ -533,8 +533,8 @@ class EpisodeNameCases(unittest.TestCase):
                e_obj.season = e_o['season']
                e_obj.episode = e_o['number']
                s.sxe_ep_obj.setdefault(e_obj.season, {})[e_obj.episode] = e_obj
-            name_cache.addNameToCache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
-                                      prodid=e_t['show_obj']['prodid'])
+            name_cache.add_name_to_cache(e_t['show_obj']['name'], tvid=e_t['show_obj']['tvid'],
+                                         prodid=e_t['show_obj']['prodid'])
            try:
                res = parser.NameParser(True).parse(e_t['parse_name'])
            except (BaseException, Exception):

@@ -550,7 +550,7 @@ class InvalidCases(unittest.TestCase):
        for s in [TVShowTest(name=rls_name, prodid=prodid, tvid=tvid, is_anime=is_anime)]:
            sickgear.showList.append(s)
            sickgear.showDict[s.sid_int] = s
-        name_cache.addNameToCache(show_name, tvid=tvid, prodid=prodid)
+        name_cache.add_name_to_cache(show_name, tvid=tvid, prodid=prodid)
        invalidexception = False
        try:
            _ = parser.NameParser(True).parse(rls_name)

@@ -939,7 +939,7 @@ class ExtraInfoNoNameTests(test.SickbeardTestDBCase):
            sickgear.showList = [tvs]
            sickgear.showDict = {tvs.sid_int: tvs}
            name_cache.nameCache = {}
-            name_cache.buildNameCache()
+            name_cache.build_name_cache()

            np = parser.NameParser()
            r = np.parse(case[2], cache_result=False)

@@ -27,7 +27,7 @@ import unittest

import sickgear
from sickgear.helpers import real_path
-from sickgear.name_cache import addNameToCache
+from sickgear.name_cache import add_name_to_cache
from sickgear.postProcessor import PostProcessor
from sickgear.processTV import ProcessTVShow
from sickgear.tv import TVEpisode, TVShow, logger

@@ -94,7 +94,7 @@ class PPBasicTests(test.SickbeardTestDBCase):
        ep_obj.release_name = 'test setter'
        ep_obj.save_to_db()

-        addNameToCache('show name', tvid=TVINFO_TVDB, prodid=3)
+        add_name_to_cache('show name', tvid=TVINFO_TVDB, prodid=3)
        sickgear.PROCESS_METHOD = 'move'

        pp = PostProcessor(test.FILEPATH)

@@ -75,7 +75,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
            sickgear.showDict[s.sid_int] = s
        sickgear.webserve.Home.make_showlist_unique_names()
        scene_exceptions.retrieve_exceptions()
-        name_cache.buildNameCache()
+        name_cache.build_name_cache()

    def test_sceneExceptionsEmpty(self):
        self.assertEqual(scene_exceptions.get_scene_exceptions(0, 0), [])

@@ -99,7 +99,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
            sickgear.showList.append(s)
            sickgear.showDict[s.sid_int] = s
        scene_exceptions.retrieve_exceptions()
-        name_cache.buildNameCache()
+        name_cache.build_name_cache()
        self.assertEqual(scene_exceptions.get_scene_exception_by_name(u'ブラック・ラグーン'), [1, 79604, -1])
        self.assertEqual(scene_exceptions.get_scene_exception_by_name(u'Burakku Ragūn'), [1, 79604, -1])
        self.assertEqual(scene_exceptions.get_scene_exception_by_name('Rokka no Yuusha'), [1, 295243, -1])

@@ -114,11 +114,11 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
        my_db.action('DELETE FROM scene_exceptions WHERE 1=1')

        # put something in the cache
-        name_cache.addNameToCache('Cached Name', prodid=0)
+        name_cache.add_name_to_cache('Cached Name', prodid=0)

        # updating should not clear the cache this time since our exceptions didn't change
        scene_exceptions.retrieve_exceptions()
-        self.assertEqual(name_cache.retrieveNameFromCache('Cached Name'), (0, 0))
+        self.assertEqual(name_cache.retrieve_name_from_cache('Cached Name'), (0, 0))


if '__main__' == __name__:

@@ -31,7 +31,7 @@ from sickgear.tv import TVEpisode, TVShow
wanted_tests = [
    dict(
        name='Start and End',
-        show=dict(indexer=1, indexerid=1, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=1, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -64,7 +64,7 @@ wanted_tests = [

    dict(
        name='Start and End, entire season',
-        show=dict(indexer=1, indexerid=10, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=10, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 2)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -97,7 +97,7 @@ wanted_tests = [

    dict(
        name='Start, entire season',
-        show=dict(indexer=1, indexerid=210, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=210, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 2)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -130,7 +130,7 @@ wanted_tests = [

    dict(
        name='End only',
-        show=dict(indexer=1, indexerid=2, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=2, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 3)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -163,7 +163,7 @@ wanted_tests = [

    dict(
        name='End only, entire season',
-        show=dict(indexer=1, indexerid=20, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=20, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 4)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -196,7 +196,7 @@ wanted_tests = [

    dict(
        name='End only, multi season',
-        show=dict(indexer=1, indexerid=3, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=3, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 5)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -229,7 +229,7 @@ wanted_tests = [

    dict(
        name='End only, multi season, entire season',
-        show=dict(indexer=1, indexerid=30, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=30, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 6)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -262,7 +262,7 @@ wanted_tests = [

    dict(
        name='End only, multi season, cross season',
-        show=dict(indexer=1, indexerid=33, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=33, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 7)),
            dict(season=1, episode=2, status=SKIPPED, quality=Quality.NONE, airdate=datetime.date(2019, 1, 1)),

@@ -295,7 +295,7 @@ wanted_tests = [

    dict(
        name='all episodes unaired',
-        show=dict(indexer=1, indexerid=35, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=35, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
            dict(season=1, episode=1, status=UNAIRED, quality=Quality.NONE, airdate=datetime.date.fromordinal(1)),
            dict(season=1, episode=2, status=UNAIRED, quality=Quality.NONE, airdate=datetime.date.fromordinal(1)),

@@ -317,7 +317,7 @@ wanted_tests = [

    dict(
        name='no episodes',
-        show=dict(indexer=1, indexerid=36, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=36, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
        ],
        start_wanted=7, end_wanted=3,

@@ -332,7 +332,7 @@ wanted_tests = [

    dict(
        name='no episodes, whole first season',
-        show=dict(indexer=1, indexerid=37, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=37, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
        ],
        start_wanted=-1, end_wanted=0,

@@ -347,7 +347,7 @@ wanted_tests = [

    dict(
        name='no episodes, whole last season',
-        show=dict(indexer=1, indexerid=38, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=38, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
        ],
        start_wanted=0, end_wanted=-1,

@@ -362,7 +362,7 @@ wanted_tests = [

    dict(
        name='no episodes, whole first and last season',
-        show=dict(indexer=1, indexerid=39, quality=Quality.combineQualities([Quality.SDTV], [])),
+        show=dict(indexer=1, indexerid=39, quality=Quality.combine_qualities([Quality.SDTV], [])),
        episodes=[
        ],
        start_wanted=-1, end_wanted=-1,

@@ -408,7 +408,7 @@ class ShowAddTests(test.SickbeardTestDBCase):
                    show_obj.sxe_ep_obj[ep['season']] = {}
                show_obj.sxe_ep_obj[ep['season']][ep['episode']] = TVEpisode(show_obj, ep['season'], ep['episode'])
                episode = show_obj.sxe_ep_obj[ep['season']][ep['episode']]
-                episode.status = Quality.compositeStatus(ep['status'], ep['quality'])
+                episode.status = Quality.composite_status(ep['status'], ep['quality'])
                episode.airdate = ep['airdate']
                episode.name = 'nothing'
                episode.epid = ep_id

@@ -57,7 +57,7 @@ class SearchTest(test.SickbeardTestDBCase):
        return True

    def __init__(self, something):
-        for provider in sickgear.providers.sortedProviderList():
+        for provider in sickgear.providers.sorted_sources():
            provider.get_url = self._fake_getURL
            #provider.isActive = self._fake_isActive

@@ -91,8 +91,8 @@ sickgear.NAMING_SPORTS_PATTERN = ''
sickgear.NAMING_MULTI_EP = 1

sickgear.PROVIDER_ORDER = []
-sickgear.newznabProviderList = providers.getNewznabProviderList('')
-sickgear.providerList = providers.makeProviderList()
+sickgear.newznab_providers = providers.newznab_source_list('')
+sickgear.provider_list = providers.provider_modules()

sickgear.PROG_DIR = os.path.abspath('..')
# sickgear.DATA_DIR = os.path.join(sickgear.PROG_DIR, 'tests')

@@ -75,7 +75,7 @@ test_shows = [
        'quality_init': [], 'quality_upgrade': [],
        'episodes': {
            1: {
-                1: {'name': 'ep1', 'status': Quality.compositeStatus(DOWNLOADED, Quality.HDWEBDL),
+                1: {'name': 'ep1', 'status': Quality.composite_status(DOWNLOADED, Quality.HDWEBDL),
                    'airdate': old_date, 'description': 'ep1 description'},
                2: {'name': 'ep2', 'status': WANTED, 'airdate': last_week, 'description': 'ep2 description'},
                3: {'name': 'ep3', 'status': WANTED, 'airdate': today, 'description': 'ep3 description'},

@@ -174,17 +174,17 @@ class WebAPICase(test.SickbeardTestDBCase):
        sickgear.events = Events(None)
        sickgear.show_queue_scheduler = scheduler.Scheduler(
            show_queue.ShowQueue(),
-            cycleTime=datetime.timedelta(seconds=3),
-            threadName='SHOWQUEUE')
+            cycle_time=datetime.timedelta(seconds=3),
+            thread_name='SHOWQUEUE')
        sickgear.search_queue_scheduler = scheduler.Scheduler(
            search_queue.SearchQueue(),
-            cycleTime=datetime.timedelta(seconds=3),
-            threadName='SEARCHQUEUE')
+            cycle_time=datetime.timedelta(seconds=3),
+            thread_name='SEARCHQUEUE')
        sickgear.backlog_search_scheduler = search_backlog.BacklogSearchScheduler(
            search_backlog.BacklogSearcher(),
-            cycleTime=datetime.timedelta(minutes=60),
+            cycle_time=datetime.timedelta(minutes=60),
            run_delay=datetime.timedelta(minutes=60),
-            threadName='BACKLOG')
+            thread_name='BACKLOG')
        sickgear.indexermapper.indexer_list = [i for i in sickgear.indexers.indexer_api.TVInfoAPI().all_sources]
        for root_dirs, path, expected in root_folder_tests:
            sickgear.ROOT_DIRS = root_dirs

@@ -198,8 +198,8 @@ class WebAPICase(test.SickbeardTestDBCase):
            elif k in show_obj.__dict__:
                show_obj.__dict__[k] = v
        if 'quality_init' in cur_show and cur_show['quality_init']:
-            show_obj.quality = Quality.combineQualities(cur_show['quality_init'],
-                                                        cur_show.get('quality_upgrade', []))
+            show_obj.quality = Quality.combine_qualities(cur_show['quality_init'],
+                                                         cur_show.get('quality_upgrade', []))
        show_obj.dirty = True

        show_obj.save_to_db(True)

@@ -216,7 +216,7 @@ class WebAPICase(test.SickbeardTestDBCase):
                ep_obj.__dict__[k] = v
            show_obj.sxe_ep_obj.setdefault(season, {})[ep] = ep_obj
            ep_obj.save_to_db(True)
-            status, quality = Quality.splitCompositeStatus(ep_obj.status)
+            status, quality = Quality.split_composite_status(ep_obj.status)
            if status in (DOWNLOADED, SNATCHED):
                s_r = SearchResult([ep_obj])
                s_r.show_obj, s_r.quality, s_r.provider, s_r.name = \

@@ -240,8 +240,8 @@ class WebAPICase(test.SickbeardTestDBCase):
        for cur_show in test_shows:
            show_obj = sickgear.helpers.find_show_by_id({cur_show['tvid']: cur_show['prodid']})
            if 'quality_init' in cur_show and cur_show['quality_init']:
-                show_obj.quality = Quality.combineQualities(cur_show['quality_init'],
-                                                            cur_show.get('quality_upgrade', []))
+                show_obj.quality = Quality.combine_qualities(cur_show['quality_init'],
+                                                             cur_show.get('quality_upgrade', []))
            else:
                show_obj.quality = int(sickgear.QUALITY_DEFAULT)
            show_obj.upgrade_once = int(cur_show.get('upgrade_once', 0))

@@ -821,7 +821,7 @@ class WebAPICase(test.SickbeardTestDBCase):
            if cur_quality:
                params.update({'quality': cur_quality_str})
            old_status = ep_obj.status
-            status, quality = Quality.splitCompositeStatus(ep_obj.status)
+            status, quality = Quality.split_composite_status(ep_obj.status)
            expect_fail = UNAIRED == status or (DOWNLOADED == status and not cur_quality)
            expected_msg = (success_msg, failed_msg)[expect_fail]
            data = self._request_from_api(webapi.CMD_SickGearEpisodeSetStatus, params=params)