Merge branch 'feature/ChangeReplacePy23Stuff' into dev

commit 69038392a4
JackDandy, 2023-02-24 15:21:16 +00:00
172 changed files with 1724 additions and 10258 deletions


@@ -3,6 +3,7 @@
* Update package resource API 63.2.0 (3ae44cd) to 67.3.2 (b9bf2ec)
* Change remove calls to legacy py2 fix encoding function
* Change requirements for pure py3
* Change codebase cleanups
### 3.27.8 (2023-02-20 23:30:00 UTC)
@@ -1080,7 +1081,7 @@
* Add API response field `global exclude require` to sg.listrequirewords endpoint
* Change improve Popen resource usage under py2
* Add overall failure monitoring to History/Connect fails (renamed from "Provider fails")
-* Change log exception during updateCache in newznab
+* Change log exception during update_cache in newznab
* Change make Py3.9 preparations
* Change anime "Available groups" to display "No groups listed..." when API is fine with no results instead of blank
* Change improve clarity of anime group lists by using terms Allow list and Block list


@@ -37,6 +37,9 @@ if old_magic != magic_number:
# skip cleaned005 as used during dev by testers
cleanups = [
+['.cleaned009.tmp', r'lib\scandir', [
+r'lib\scandir\__pycache__', r'lib\scandir',
+]],
['.cleaned008.tmp', r'lib\tornado_py3', [
r'lib\bs4_py2\builder\__pycache__', r'lib\bs4_py2\builder', r'lib\bs4_py2',
r'lib\bs4_py3\builder\__pycache__', r'lib\bs4_py3\builder', r'lib\bs4_py3',
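For orientation, a minimal sketch of how a cleanup table like this might be consumed; the function name and loop are illustrative assumptions, not the project's actual code:

import os
import shutil

def run_cleanups(root, cleanups):
    # each entry: (marker_file, probe_dir, dirs_to_remove); the marker file
    # records that a pass already ran, the probe dir detects a stale install
    for marker, probe, dirs in cleanups:
        marker_file = os.path.join(root, marker)
        if os.path.isfile(marker_file) or not os.path.isdir(os.path.join(root, probe)):
            continue  # already cleaned, or nothing stale to remove
        for rel_dir in dirs:
            shutil.rmtree(os.path.join(root, rel_dir), ignore_errors=True)
        with open(marker_file, 'w') as fh:
            fh.write('cleaned')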


@@ -65,7 +65,7 @@
<tbody>
#for $hItem in $cacheResults:
-#set $provider = $providers.getProviderClass($hItem['provider'])
+#set $provider = $providers.get_by_id($hItem['provider'])
#set $tip = '%s @ %s' % ($hItem['provider'], $SGDatetime.sbfdatetime($SGDatetime.fromtimestamp($hItem['time'])))
#set $ver = $hItem['version']
#set $ver = ($ver, '')[-1 == $ver]


@@ -182,7 +182,11 @@ def param(visible=True, rid=None, cache_person=None, cache_char=None, person=Non
#end if
#set $section_links = False
#set $all_sources = $TVInfoAPI().all_sources
#for $cur_src, $cur_sid in sorted(iteritems($person.ids))
#if $cur_src not in $all_sources:
#continue
#end if
#if $TVInfoAPI($cur_src).config.get('people_url')
#if not $section_links
#set $section_links = True


@@ -29,7 +29,7 @@
</td>
</tr>
<tr><td class="infoTableHeader">Config file:</td><td class="infoTableCell">$sg_str('CONFIG_FILE')</td></tr>
<tr><td class="infoTableHeader">Database file:</td><td class="infoTableCell">$db.dbFilename()</td></tr>
<tr><td class="infoTableHeader">Database file:</td><td class="infoTableCell">$db.db_filename()</td></tr>
#if $db.db_supports_backup
<tr><td class="infoTableHeader">Database backups:</td><td class="infoTableCell">$backup_db_path</td></tr>
#end if


@@ -13,7 +13,6 @@
#from sickgear.sgdatetime import *
<% def sg_var(varname, default=False): return getattr(sickgear, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickgear, varname, default) %>#slurp#
-#from _23 import list_keys
##
#set global $title = 'Config - General'
#set global $header = 'General Settings'
@@ -846,7 +845,7 @@
<span class="component-title">File logging level:</span>
<span class="component-desc">
<select id="file_logging_presets" name="file_logging_preset" class="form-control input-sm">
-#set $levels = $list_keys(file_logging_presets)
+#set $levels = $list(file_logging_presets)
#set void = $levels.sort(key=lambda x: $file_logging_presets[$x])
#set $level_count = len($levels)
#for $level in $levels


@@ -36,12 +36,12 @@
<!--
\$(document).ready(function(){
#if $sickgear.USE_NZBS
-#for $cur_newznab_provider in $sickgear.newznabProviderList:
+#for $cur_newznab_provider in $sickgear.newznab_providers:
\$(this).addProvider('$cur_newznab_provider.get_id()', '$cur_newznab_provider.name', '$cur_newznab_provider.url', '<%= starify(cur_newznab_provider.key) %>', '$cur_newznab_provider.cat_ids', $int($cur_newznab_provider.default), !0);
#end for
#end if
#if $sickgear.USE_TORRENTS
-#for $cur_torrent_rss_provider in $sickgear.torrentRssProviderList:
+#for $cur_torrent_rss_provider in $sickgear.torrent_rss_providers:
\$(this).addTorrentRssProvider('$cur_torrent_rss_provider.get_id()', '$cur_torrent_rss_provider.name', '$cur_torrent_rss_provider.url', '<%= starify(cur_torrent_rss_provider.cookies) %>');
#end for
#end if
@@ -101,7 +101,7 @@
<ul id="provider_order_list" class="provider_order_panel">
-#for $cur_provider in [$x for $x in $sickgear.providers.sortedProviderList()
+#for $cur_provider in [$x for $x in $sickgear.providers.sorted_sources()
if $x.providerType == $GenericProvider.NZB and $sickgear.USE_NZBS or
$x.providerType == $GenericProvider.TORRENT and $sickgear.USE_TORRENTS]
#set $cur_name = $cur_provider.get_id()
@@ -129,7 +129,7 @@
#end for
</ul>
<input type="hidden" name="provider_order" id="provider_order" value="<%=' '.join([x.get_id()+':'+str(int(x.is_enabled())) for x in sickgear.providers.sortedProviderList()])%>"/>
<input type="hidden" name="provider_order" id="provider_order" value="<%=' '.join([x.get_id()+':'+str(int(x.is_enabled())) for x in sickgear.providers.sorted_sources()])%>"/>
#if $sickgear.USE_NZBS or $sickgear.USE_TORRENTS
<div id="provider_key">
<span style="float:left;font-size:10px;vertical-align:top;font-weight:normal">(PA)</span><p class="note">Public access, no account required</p>
@@ -168,7 +168,7 @@
<span class="component-desc">
#set $provider_config_list_enabled = []
#set $provider_config_list = []
-#for $cur_provider in [$x for $x in $sickgear.providers.sortedProviderList()
+#for $cur_provider in [$x for $x in $sickgear.providers.sorted_sources()
if $x.providerType == $GenericProvider.NZB and $sickgear.USE_NZBS or
$x.providerType == $GenericProvider.TORRENT and $sickgear.USE_TORRENTS]
#if $cur_provider.is_enabled()
@@ -213,7 +213,7 @@
#set $filter_scene_rej_nuked_desc = 'not scene nuked'
#set $filter_scene_nuked_active_desc = 'nuked if no active search results'
#set $filter_tip = 'nothing selected allows everything (i.e. no filtering, default)'
-#for $cur_newznab_provider in [$cur_provider for $cur_provider in $sickgear.newznabProviderList]
+#for $cur_newznab_provider in [$cur_provider for $cur_provider in $sickgear.newznab_providers]
<div class="providerDiv" id="${cur_newznab_provider.get_id()}Div">
#set $can_recent = $hasattr($cur_newznab_provider, 'enable_recentsearch')
#set $can_backlog = $hasattr($cur_newznab_provider, 'enable_backlog')
@@ -345,8 +345,8 @@
##
##
-#for $cur_nzb_provider in [$cur_provider for $cur_provider in $sickgear.providers.sortedProviderList()
-if $cur_provider.providerType == $GenericProvider.NZB and $cur_provider not in $sickgear.newznabProviderList]:
+#for $cur_nzb_provider in [$cur_provider for $cur_provider in $sickgear.providers.sorted_sources()
+if $cur_provider.providerType == $GenericProvider.NZB and $cur_provider not in $sickgear.newznab_providers]:
<div class="providerDiv" id="${cur_nzb_provider.get_id()}Div">
#set $can_recent = $hasattr($cur_nzb_provider, 'enable_recentsearch')
#set $can_backlog = $hasattr($cur_nzb_provider, 'enable_backlog')
@@ -488,7 +488,7 @@
##
##
-#for $cur_torrent_provider in $sickgear.USE_TORRENTS and [$cur_provider for $cur_provider in $sickgear.providers.sortedProviderList()
+#for $cur_torrent_provider in $sickgear.USE_TORRENTS and [$cur_provider for $cur_provider in $sickgear.providers.sorted_sources()
if $cur_provider.providerType == $GenericProvider.TORRENT] or []:
<div class="providerDiv" id="${cur_torrent_provider.get_id()}Div">
#if callable(getattr(cur_torrent_provider, 'ui_string', None))


@@ -319,7 +319,7 @@
</div>
#end if
-#set $anyQualities, $bestQualities = $Quality.splitQuality(int($show_obj.quality))
+#set $anyQualities, $bestQualities = $Quality.split_quality(int($show_obj.quality))
#if $show_obj.quality in $qualityPresets
<div>
<span class="details-title">Quality</span>


@@ -202,7 +202,7 @@
<div class="field-pair">
-#set $qualities = $common.Quality.splitQuality(int($show_obj.quality))
+#set $qualities = $common.Quality.split_quality(int($show_obj.quality))
#set global $any_qualities = $qualities[0]
#set global $best_qualities = $qualities[1]
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')


@@ -133,7 +133,7 @@
<tbody>
#for $hItem in $history_results
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($hItem['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($hItem['action']))
#set $display_name = '<span data-sort="%s">%s - S%02iE%02i</span>' % (
$hItem['data_name'],
(('<span class="article">%s</span> %s' % ($hItem['name1'], $hItem['name2'])), $hItem['show_name'])[$sg_var('SORT_ARTICLE') or not $hItem['name1']],
@@ -141,7 +141,7 @@
<tr>
#set $curdatetime = $datetime.datetime.strptime(str($hItem['date']), $history.dateFormat)
<td><div class="${fuzzydate}" data-sort="$time.mktime($curdatetime.timetuple())">$SGDatetime.sbfdatetime($curdatetime, show_seconds=True)</div></td>
<td class="tvShow"><a href="$sbRoot/home/view-show?tvid_prodid=$hItem['tvid_prodid']#season-$hItem['season']">$display_name#if $Quality.splitCompositeStatus($hItem['action'])[0] == $SNATCHED_PROPER then ' <span class="quality Proper">Proper</span>' else ''#</a></td>
<td class="tvShow"><a href="$sbRoot/home/view-show?tvid_prodid=$hItem['tvid_prodid']#season-$hItem['season']">$display_name#if $Quality.split_composite_status($hItem['action'])[0] == $SNATCHED_PROPER then ' <span class="quality Proper">Proper</span>' else ''#</a></td>
<td#echo ('', ' class="subtitles_column"')[$SUBTITLED == $curStatus]#>
#if $SUBTITLED == $curStatus
<img width="16" height="11" src="$sbRoot/images/flags/<%= hItem["resource"][len(hItem["resource"])-6:len(hItem["resource"])-4] + '.png' %>">
@@ -156,7 +156,7 @@
#else
#if '-1' != $hItem['provider'] and len($hItem['provider'])
#if $curStatus in $SNATCHED_ANY + [$FAILED]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($hItem['provider']))
#if None is not $provider
<img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16" /><span>$provider.name</span>
#else
@@ -207,10 +207,10 @@
#set $order = 1
#set $ordinal_indicators = {'1':'st', '2':'nd', '3':'rd'}
#for $action in reversed($hItem['actions'])
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($action['action']))
#set $basename = $os.path.basename($action['resource'])
#if $curStatus in $SNATCHED_ANY + [$FAILED]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($action['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($action['provider']))
#if None is not $provider
#set $prov_list += ['<span%s><img class="help" src="%s/images/providers/%s" width="16" height="16" alt="%s" title="%s.. %s: %s" /></span>'\
% (('', ' class="fail"')[$FAILED == $curStatus], $sbRoot, $provider.image_name(), $provider.name,
@@ -262,7 +262,7 @@
#if $sg_var('USE_SUBTITLES')
<td>
#for $action in reversed($hItem['actions'])
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($action['action']))
#if $SUBTITLED == $curStatus
<img src="$sbRoot/images/subtitles/<%= action['provider'] + '.png' %>" width="16" height="16" alt="$action['provider']" title="<%= action['provider'].capitalize() %>:$os.path.basename($action['resource'])" />
<span> / </span>
@@ -575,7 +575,7 @@
#for $hItem in $stat_results
<tr>
<td class="provider text-nowrap">
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($hItem['provider']))
#if None is not $provider
<img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16"><span data-sort="$hItem['provider']">$provider.name</span>
#else
@@ -628,7 +628,7 @@
</thead>
#set global $row = 0
<tbody>
-#for $cur_provider in $sorted($sickgear.newznabProviderList, key=lambda x: x.last_recent_search or SGDatetime(2000,1,1), reverse=True)
+#for $cur_provider in $sorted($sickgear.newznab_providers, key=lambda x: x.last_recent_search or SGDatetime(2000,1,1), reverse=True)
#set $last_rls_date = '-'
#set $last_rls_age = None
#set $last_rls_age_str = '-'


@@ -18,7 +18,7 @@
</div>
<div class="field-pair">
-#set $qualities = $Quality.splitQuality($sg_var('QUALITY_DEFAULT', SD))
+#set $qualities = $Quality.split_quality($sg_var('QUALITY_DEFAULT', SD))
#set global $any_qualities = $qualities[0]
#set global $best_qualities = $qualities[1]
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')


@@ -25,7 +25,7 @@
#set $ep_str = '%sx%s' % $ep_key
#set $epLoc = $ep['location']
#set never_aired = 0 < int($ep['season']) and 1 == int($ep['airdate'])
<tr class="#echo ' '.join([$Overview.overviewStrings[$ep_cats[$ep_str]], ('', 'airdate-never')[$never_aired], ('', 'archived')[$ARCHIVED == $Quality.splitCompositeStatus(int($ep['status']))[0]]])#">
<tr class="#echo ' '.join([$Overview.overviewStrings[$ep_cats[$ep_str]], ('', 'airdate-never')[$never_aired], ('', 'archived')[$ARCHIVED == $Quality.split_composite_status(int($ep['status']))[0]]])#">
<td class="col-checkbox">
<input type="checkbox" class="epCheck #echo 'hide' if $UNAIRED == int($ep['status']) else ''#" id="$ep_str" name="$ep_str">
</td>
@@ -99,7 +99,7 @@
</td>
#end if
#slurp
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($ep['status']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($ep['status']))
#if Quality.NONE != $curQuality
<td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# #if 'Unknown' != $statusStrings[$curStatus]#<span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.get_quality_ui($curQuality)</span>#end if#</td>
#else
@@ -107,7 +107,7 @@
#end if
<td class="col-search">
#if 0 != int($ep['season'])
-#set $status = $Quality.splitCompositeStatus(int($ep['status']))[0]
+#set $status = $Quality.split_composite_status(int($ep['status']))[0]
#if ($status in $SNATCHED_ANY + [$DOWNLOADED, $ARCHIVED]) and $sg_var('USE_FAILED_DOWNLOADS')
<a class="ep-retry" href="$sbRoot/home/episode-retry?tvid_prodid=$show_obj.tvid_prodid&amp;season=$ep['season']&amp;episode=$ep['episode']"><img src="$sbRoot/images/search16.png" height="16" alt="retry" title="Retry download"></a>
#else


@@ -1,12 +1,11 @@
#import sickgear
#from sickgear.common import Quality, qualityPresets, qualityPresetStrings
-#from _23 import filter_list
##
#set $html_checked = ' checked="checked"'
#set $html_selected = ' selected="selected"'
<div class="field-pair">
<label for="quality-preset" class="clearfix">
-#set $overall_quality = $Quality.combineQualities($any_qualities, $best_qualities)
+#set $overall_quality = $Quality.combine_qualities($any_qualities, $best_qualities)
<span class="component-title input">Quality to download</span>
<span class="component-desc">
#set $selected = None
@@ -35,7 +34,7 @@
<span id="wanted-quality" class="component-desc">
<p>select one or more qualities; the best one found when searching will be used</p>
-#set $any_quality_list = filter_list(lambda x: x > $Quality.NONE and x < $Quality.UNKNOWN, $Quality.qualityStrings)
+#set $any_quality_list = list(filter(lambda x: x > $Quality.NONE and x < $Quality.UNKNOWN, $Quality.qualityStrings))
#set $has_unknown = False
#for $cur_quality in sorted($any_quality_list):
##set $has_unknown |= ($Quality.UNKNOWN == $cur_quality and $cur_quality in $any_qualities)
@@ -62,7 +61,7 @@
</div>
<span id="upgrade-quality" class="component-desc">
<p>optional, upgrade existing media to any selected quality</p>
-#set $best_quality_list = filter_list(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
+#set $best_quality_list = list(filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings))
#for $cur_quality in sorted($best_quality_list):
<a href="#" data-quality="$cur_quality" class="btn btn-inverse dark-bg#echo ('', ' active')[$cur_quality in $best_qualities]#" role="button"><i class="icon-glyph searchadd"></i>$Quality.get_quality_ui($cur_quality)</a>
#if $cur_quality in [$Quality.SDDVD, $Quality.FULLHDTV, $Quality.FULLHDBLURAY]
@@ -85,7 +84,7 @@
<span class="component-desc bfr">
<div style="float:left;padding-right:28px">
<h4 class="jumbo">Wanted</h4>
-#set $any_quality_list = filter_list(lambda x: x > $Quality.NONE, $Quality.qualityStrings)
+#set $any_quality_list = list(filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings))
<select id="wanted-qualities" name="any_qualities" multiple="multiple" size="$len($any_quality_list)" class="form-control form-control-inline input-sm">
#for $cur_quality in sorted($any_quality_list):
@@ -96,7 +95,7 @@
<div style="float:left;padding-right:20px">
<h4 class="jumbo">Upgrade to</h4>
-#set $best_quality_list = filter_list(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
+#set $best_quality_list = list(filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings))
<select id="upgrade-qualities" name="best_qualities" multiple="multiple" size="$len($best_quality_list)" class="form-control form-control-inline input-sm">
#for $cur_quality in sorted($best_quality_list):
<option value="$cur_quality"#echo ('', $html_selected)[$cur_quality in $best_qualities]#>$Quality.get_quality_ui($cur_quality)</option>


@@ -222,7 +222,7 @@
#for item in $history_compact
#if 'tvid_prodid' in $item
#set $action = $item['actions'][0]
-#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
+#set $curStatus, $curQuality = $Quality.split_composite_status(int($action['action']))
#set $status = None
#if $curStatus in $SNATCHED_ANY + [$FAILED]
#set $status = 'snatched'


@@ -62,7 +62,7 @@
<tbody>
#set $order = $oldest
#for $hItem in $failed_results[::-1]
-#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
+#set $provider = $providers.get_by_id($generic.GenericProvider.make_id($hItem['provider']))
#set $provider_name = None is not $provider and $provider.name or 'missing provider'
#set $provider_image = None is not $provider and $provider.image_name() or 'missing.png'
<tr>


@@ -2,7 +2,6 @@
#from sickgear.common import Quality, qualityPresets, qualityPresetStrings, SD
#from sickgear.indexers.indexer_config import TVINFO_TVMAZE, TVINFO_TVDB
#from lib import exceptions_helper as exceptions
-#from _23 import filter_list
<% def sg_var(varname, default=False): return getattr(sickgear, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickgear, varname, default) %>#slurp#
##
@@ -19,7 +18,7 @@
#else:
#set $initial_quality = $SD
#end if
-#set $anyQualities, $bestQualities = $Quality.splitQuality($sg_var('QUALITY_DEFAULT', $initial_quality))
+#set $anyQualities, $bestQualities = $Quality.split_quality($sg_var('QUALITY_DEFAULT', $initial_quality))
<script type="text/javascript" src="$sbRoot/js/qualityChooser.js?v=$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/massEdit.js?v=$sbPID"></script>
@@ -69,7 +68,7 @@
<div id="custom-quality" class="show-if-quality-custom">
<div class="manageCustom pull-left">
<h4 style="font-size:14px">Initial</h4>
-#set $anyQualityList = filter_list(lambda x: x > $Quality.NONE, $Quality.qualityStrings)
+#set $anyQualityList = list(filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings))
<select id="wanted-qualities" name="any_qualities" multiple="multiple" size="$len($anyQualityList)">
#for $curQuality in sorted($anyQualityList):
<option value="$curQuality" #if $curQuality in $anyQualities then $selected else ''#>$Quality.get_quality_ui($curQuality)</option>
@@ -78,7 +77,7 @@
</div>
<div class="manageCustom pull-left">
<h4 style="font-size:14px">Upgrade to</h4>
-#set $bestQualityList = filter_list(lambda x: x > $Quality.SDTV, $Quality.qualityStrings)
+#set $bestQualityList = list(filter(lambda x: x > $Quality.SDTV, $Quality.qualityStrings))
<select id="upgrade-qualities" name="best_qualities" multiple="multiple" size="$len($bestQualityList)">
#for $curQuality in sorted($bestQualityList):
<option value="$curQuality" #if $curQuality in $bestQualities then $selected else ''#>$Quality.get_quality_ui($curQuality)</option>


@@ -3,7 +3,6 @@
#from sickgear.common import *
#from sickgear.logger import reverseNames
#from sickgear.helpers import maybe_plural
-#from _23 import list_keys
##
#set global $header = 'Log File'
#set global $title = 'Logs'
@@ -23,7 +22,7 @@
<div class="h2footer pull-right">
<select name="minLevel" id="minLevel" class="form-control form-control-inline input-sm pull-right">
-#set $levels = $list_keys($reverseNames)
+#set $levels = $list($reverseNames)
#set void = $levels.sort(key=lambda x: $reverseNames[$x])
#set $level_count = len($levels)
#for $level in $levels


@@ -15,12 +15,25 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import datetime
from base64 import encodebytes as b64encodebytes
from collections import deque
from itertools import islice
# noinspection PyUnresolvedReferences
from configparser import ConfigParser
# noinspection PyUnresolvedReferences
from enum import Enum
from itertools import islice, zip_longest
# noinspection PyUnresolvedReferences
from inspect import getfullargspec as getargspec
# noinspection PyUnresolvedReferences
from os import scandir, DirEntry
# noinspection PyUnresolvedReferences
from subprocess import Popen
from sys import version_info
from six import binary_type, moves
import datetime
# noinspection PyUnresolvedReferences, PyPep8Naming
import xml.etree.ElementTree as etree
# noinspection PyUnresolvedReferences
from six.moves.urllib.parse import quote, quote_plus, unquote as six_unquote, unquote_plus as six_unquote_plus, \
urlencode, urlsplit, urlunparse, urlunsplit
@@ -40,30 +53,24 @@ if False:
# noinspection PyTypeChecker
urlencode = urlsplit = urlunparse = urlunsplit = None # type: Callable
PY38 = version_info[0:2] >= (3, 8)
""" one off consumables (Iterators) """
filter_iter = moves.filter # type: Callable[[Callable, Iterable], Iterator]
map_iter = moves.map # type: Callable[[Callable, ...], Iterator]
def map_consume(*args):
# type: (...) -> None
"""Run a lambda over elements without returning anything"""
-deque(moves.map(*args), maxlen=0)
+deque(map(*args), maxlen=0)
def consume(iterator, n=None):
# type: (Iterator, Optional[int]) -> None
"""Advance the iterator n-steps ahead. If n is None, consume entirely. Returns nothing.
-Useful if a method returns a Iterator but it's not used, but still all should be called,
+Useful if a method returns an Iterator that is not used, but still all should be called,
for example if each iter element calls a function that should be called for all or
given amount of elements in Iterator
examples:
-consume(filter_iter(...)) # consumes all elements of given function that returns a Iterator
-consume(filter_iter(...), 3) # consumes next 3 elements of given function that returns a Iterator
+consume(filter_iter(...)) # consumes all elements of given function that returns an Iterator
+consume(filter_iter(...), 3) # consumes next 3 elements of given function that returns an Iterator
"""
# Use functions that consume iterators at C speed.
if n is None:
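A short py3 usage sketch of the two consumers above (the data here is made up):

from collections import deque
from itertools import islice

items = iter(range(5))
deque(map(print, items), maxlen=0)   # map_consume: run map for side effects, keep nothing

it = iter(range(10))
next(islice(it, 3, 3), None)         # consume(it, 3): advance exactly three steps
assert next(it) == 3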
@@ -76,7 +83,7 @@ def consume(iterator, n=None):
def decode_str(s, encoding='utf-8', errors=None):
# type: (...) -> AnyStr
-if isinstance(s, binary_type):
+if isinstance(s, bytes):
if None is errors:
return s.decode(encoding)
return s.decode(encoding, errors)
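The visible branch now reduces to bytes.decode; for example, given the defaults shown above:

assert decode_str(b'caf\xc3\xa9') == 'café'               # bytes decoded as UTF-8 by default
assert decode_str(b'\xff', errors='replace') == '\ufffd'  # errors forwarded to bytes.decode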
@@ -99,7 +106,7 @@ def html_unescape(s):
def list_range(*args, **kwargs):
# type: (...) -> List
-return list(moves.range(*args, **kwargs))
+return list(range(*args, **kwargs))
def urlparse(url, scheme='', allow_fragments=True):
@@ -135,181 +142,26 @@ def b64encodestring(s, keep_eol=False):
return data.rstrip()
if 2 != version_info[0]:
# ---------
# Python 3+
# ---------
# noinspection PyUnresolvedReferences,PyProtectedMember
from base64 import decodebytes, encodebytes
b64decodebytes = decodebytes
b64encodebytes = encodebytes
# noinspection PyUnresolvedReferences,PyCompatibility
from configparser import ConfigParser
# noinspection PyUnresolvedReferences
from enum import Enum
# noinspection PyUnresolvedReferences
from os import scandir, DirEntry
# noinspection PyUnresolvedReferences
from itertools import zip_longest
# noinspection PyUnresolvedReferences
from inspect import getfullargspec as getargspec
native_timestamp = datetime.datetime.timestamp # type: Callable[[datetime.datetime], float]
# noinspection PyUnresolvedReferences
from subprocess import Popen
# noinspection PyUnresolvedReferences, PyPep8Naming
import xml.etree.ElementTree as etree
def unquote(string, encoding='utf-8', errors='replace'):
return decode_str(six_unquote(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
encoding, errors)
ordered_dict = dict
native_timestamp = datetime.datetime.timestamp # type: Callable[[datetime.datetime], float]
def unquote_plus(string, encoding='utf-8', errors='replace'):
return decode_str(six_unquote_plus(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
encoding, errors)
def unquote(string, encoding='utf-8', errors='replace'):
return decode_str(six_unquote(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
encoding, errors)
def unquote_plus(string, encoding='utf-8', errors='replace'):
return decode_str(six_unquote_plus(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
encoding, errors)
def decode_bytes(d, encoding='utf-8', errors='replace'):
if not isinstance(d, bytes):
# noinspection PyArgumentList
return bytes(d, encoding=encoding, errors=errors)
return d
def decode_bytes(d, encoding='utf-8', errors='replace'):
if not isinstance(d, binary_type):
# noinspection PyArgumentList
return bytes(d, encoding=encoding, errors=errors)
return d
def filter_list(*args):
# type: (...) -> List
return list(filter(*args))
def list_items(d):
# type: (Dict) -> List[Tuple[Any, Any]]
"""
equivalent to python 2 .items()
"""
return list(d.items())
def list_keys(d):
# type: (Dict) -> List
"""
equivalent to python 2 .keys()
"""
return list(d)
def list_values(d):
# type: (Dict) -> List
"""
equivalent to python 2 .values()
"""
return list(d.values())
def map_list(*args):
# type: (...) -> List
return list(map(*args))
def map_none(*args):
# type: (...) -> List
return list(zip_longest(*args))
def unidecode(data):
# type: (AnyStr) -> AnyStr
return data
else:
# ---------
# Python 2
# ---------
import time
from lib.unidecode import unidecode as unicode_decode
# noinspection PyProtectedMember,PyDeprecation
from base64 import decodestring, encodestring
# noinspection PyDeprecation
b64decodebytes = decodestring
# noinspection PyDeprecation
b64encodebytes = encodestring
# noinspection PyUnresolvedReferences
from lib.backports.configparser import ConfigParser
# noinspection PyUnresolvedReferences
from lib.enum34 import Enum
# noinspection PyProtectedMember,PyUnresolvedReferences
from lib.scandir.scandir import scandir, GenericDirEntry as DirEntry
# noinspection PyUnresolvedReferences,PyDeprecation
from inspect import getargspec
try:
# noinspection PyPep8Naming
import xml.etree.cElementTree as etree
except ImportError:
# noinspection PyPep8Naming
import xml.etree.ElementTree as etree
from collections import OrderedDict
ordered_dict = OrderedDict
def _totimestamp(dt=None):
# type: (datetime.datetime) -> float
""" This function should only be used in this module due to its 1970s+ limitation as that's all we need here and
sgdatatime can't be used at this module level
"""
return time.mktime(dt.timetuple())
native_timestamp = _totimestamp # type: Callable[[datetime.datetime], float]
from subprocess import Popen as _Popen
class Popen(_Popen):
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
for x in filter_iter(lambda y: y, [self.stdout, self.stderr, self.stdin]):
x.close()
self.wait()
def unquote(string, encoding='utf-8', errors='replace'):
return decode_str(six_unquote(decode_str(string, encoding, errors)), encoding, errors)
def unquote_plus(string, encoding='utf-8', errors='replace'):
return decode_str(six_unquote_plus(decode_str(string, encoding, errors)), encoding, errors)
# noinspection PyUnusedLocal
def decode_bytes(d, encoding='utf-8', errors='replace'):
if not isinstance(d, binary_type):
return bytes(d)
return d
def filter_list(*args):
# type: (...) -> List
# noinspection PyTypeChecker
return filter(*args)
def list_items(d):
# type: (Dict) -> List[Tuple[Any, Any]]
# noinspection PyTypeChecker
return d.items()
def list_keys(d):
# type: (Dict) -> List
# noinspection PyTypeChecker
return d.keys()
def list_values(d):
# type: (Dict) -> List
# noinspection PyTypeChecker
return d.values()
def map_list(*args):
# type: (...) -> List
# noinspection PyTypeChecker
return map(*args)
def map_none(*args):
# type: (...) -> List
# noinspection PyTypeChecker
return map(None, *args)
def unidecode(data):
# type: (AnyStr) -> AnyStr
# noinspection PyUnresolvedReferences
return isinstance(data, unicode) and unicode_decode(data) or data
def map_none(*args):
# type: (...) -> List
return list(zip_longest(*args))
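With the py2 branch gone, each deleted helper collapses to a builtin at the call site; an illustrative summary:

from itertools import zip_longest

d = {'a': 1, 'b': 2}
assert list(d) == ['a', 'b']                    # replaces list_keys(d)
assert list(d.values()) == [1, 2]               # replaces list_values(d)
assert list(d.items()) == [('a', 1), ('b', 2)]  # replaces list_items(d)
assert list(filter(None, [0, 1, 2])) == [1, 2]  # replaces filter_list(None, ...)
assert list(map(str, [1, 2])) == ['1', '2']     # replaces map_list(str, ...)
assert list(zip_longest('ab', 'x')) == [('a', 'x'), ('b', None)]  # replaces map_none(...)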


@@ -22,7 +22,7 @@ import threading
from datetime import timedelta
from time import sleep, time
-from _23 import ConfigParser
+from configparser import ConfigParser
from .aniDBlink import AniDBLink
from .aniDBcommands import *


@@ -21,7 +21,6 @@ from lib.tvinfo_base import CastList, PersonGenders, RoleTypes, \
from json_helper import json_dumps
from sg_helpers import clean_data, get_url, iterate_chunk, try_int
-from _23 import filter_list
from six import iteritems
# noinspection PyUnreachableCode
@@ -682,12 +681,12 @@ class TmdbIndexer(TVInfoBase):
season_cast_obj['id'] for season_cast_obj in
season_data[season_obj[0]].get('cast') or []])
-for person_obj in sorted(filter_list(lambda a: a['id'] in main_cast_ids,
-show_data['aggregate_credits']['cast'] or [])[:50],
+for person_obj in sorted(list(filter(lambda a: a['id'] in main_cast_ids,
+show_data['aggregate_credits']['cast'] or []))[:50],
key=lambda c: (main_cast_ids.get(c['id'], 0) or 0,
c['total_episode_count'], c['order'] * -1), reverse=True):
-for character in sorted(filter_list(lambda b: b['credit_id'] in main_cast_credit_ids,
-person_obj.get('roles', []) or []),
+for character in sorted(list(filter(lambda b: b['credit_id'] in main_cast_credit_ids,
+person_obj.get('roles', []) or [])),
key=lambda c: c['episode_count'], reverse=True):
character_obj = TVInfoCharacter(
name=clean_data(character['character']),


@@ -39,7 +39,6 @@ from lib.tvinfo_base import CastList, TVInfoCharacter, CrewList, TVInfoPerson, R
from .tvdb_exceptions import TvdbError, TvdbShownotfound, TvdbTokenexpired
from .tvdb_ui import BaseUI, ConsoleUI
-from _23 import filter_list, list_keys, list_values, map_list
from six import integer_types, iteritems, PY2, string_types
# noinspection PyUnreachableCode
@@ -290,7 +289,7 @@ class Tvdb(TVInfoBase):
'nl': 'nld', 'no': 'nor',
'pl': 'pol', 'pt': 'pot', 'ru': 'rus', 'sk': 'slv', 'sv': 'swe', 'zh': 'zho', '_1': 'srp',
}
-self.config['valid_languages_3'] = list_values(self.config['langabbv_23'])
+self.config['valid_languages_3'] = list(self.config['langabbv_23'].values())
# TheTvdb.com should be based around numeric language codes,
# but to link to a series like http://thetvdb.com/?tab=series&id=79349&lid=16
@@ -358,7 +357,7 @@ class Tvdb(TVInfoBase):
else:
d_m = shows
if d_m:
-results = map_list(map_data, [d_m['data']])
+results = list(map(map_data, [d_m['data']]))
if ids.get(TVINFO_TVDB_SLUG):
cache_id_key = 's-id-%s-%s' % (TVINFO_TVDB, ids[TVINFO_TVDB_SLUG])
is_none, shows = self._get_cache_entry(cache_id_key)
@@ -373,7 +372,7 @@
if d_m:
for r in d_m:
if ids.get(TVINFO_TVDB_SLUG) == r['slug']:
-results = map_list(map_data, [r])
+results = list(map(map_data, [r]))
break
if name:
for n in ([name], name)[isinstance(name, list)]:
@@ -390,7 +389,7 @@
if r:
if not isinstance(r, list):
r = [r]
-results.extend(map_list(map_data, r))
+results.extend(list(map(map_data, r)))
seen = set()
results = [seen.add(r['id']) or r for r in results if r['id'] not in seen]
@@ -613,8 +612,8 @@
# type: (int, Optional[str]) -> Optional[dict]
results = self.search_tvs(sid, language=language)
for cur_result in (isinstance(results, dict) and results.get('results') or []):
-result = filter_list(lambda r: 'series' == r['type'] and sid == r['id'],
-cur_result.get('nbHits') and cur_result.get('hits') or [])
+result = list(filter(lambda r: 'series' == r['type'] and sid == r['id'],
+cur_result.get('nbHits') and cur_result.get('hits') or []))
if 1 == len(result):
result[0]['overview'] = self.clean_overview(
result[0]['overviews'][self.config['langabbv_23'].get(language) or 'eng'])
@@ -627,7 +626,7 @@
# notify of new keys
if ENV.get('SG_DEV_MODE'):
-new_keys = set(list_keys(result[0])).difference({
+new_keys = set(list(result[0])).difference({
'_highlightResult', 'aliases', 'banner',
'fanart', 'firstaired', 'follower_count',
'id', 'image', 'is_tvdb_searchable', 'is_tvt_searchable',
@@ -788,7 +787,7 @@
series_found = self._getetsrc(self.config['url_search_series'], params=self.config['params_search_series'],
language=self.config['language'])
if series_found:
-return list_values(series_found)[0]
+return list(series_found.values())[0]
except (BaseException, Exception):
pass
@@ -899,15 +898,15 @@
try:
for cur_result in (isinstance(results, dict) and results.get('results') or []):
# sorts 'banners/images/missing/' to last before filter
-people = filter_list(
+people = list(filter(
lambda r: 'person' == r['type']
and rc_clean.sub(name, '') == rc_clean.sub(r['name'], ''),
cur_result.get('nbHits')
and sorted(cur_result.get('hits'),
-key=lambda x: len(x['image']), reverse=True) or [])
+key=lambda x: len(x['image']), reverse=True) or []))
if ENV.get('SG_DEV_MODE'):
for person in people:
-new_keys = set(list_keys(person)).difference({
+new_keys = set(list(person)).difference({
'_highlightResult', 'banner', 'id', 'image',
'is_tvdb_searchable', 'is_tvt_searchable', 'name',
'objectID', 'people_birthdate', 'people_died',


@@ -27,7 +27,6 @@ from lib.tvinfo_base import TVInfoBase, TVInfoImage, TVInfoImageSize, TVInfoImag
crew_type_names, TVInfoPerson, RoleTypes, TVInfoShow, TVInfoEpisode, TVInfoIDs, TVInfoNetwork, TVInfoSeason, \
PersonGenders, TVINFO_TVMAZE, TVINFO_TVDB, TVINFO_IMDB
-from _23 import filter_iter
from six import integer_types, iteritems, string_types
# noinspection PyUnreachableCode
@@ -683,7 +682,7 @@ class TvMaze(TVInfoBase):
premieres = []
returning = []
rc_lang = re.compile('(?i)eng|jap')
-for cur_show in filter_iter(lambda s: 1 == s.episode_number and (
+for cur_show in filter(lambda s: 1 == s.episode_number and (
None is s.show.language or rc_lang.search(s.show.language)), schedule):
if 1 == cur_show.season_number:
premieres += [cur_show]

File diff suppressed because it is too large


@@ -1,274 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import abc
import os
try:
from collections.abc import MutableMapping
except ImportError:
from collections import MutableMapping
try:
from collections import UserDict
except ImportError:
from UserDict import UserDict
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
try:
import pathlib
except ImportError:
pathlib = None
from io import open
import sys
try:
from thread import get_ident
except ImportError:
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
__all__ = ['UserDict', 'OrderedDict', 'open']
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
native_str = str
str = type('str')
def from_none(exc):
"""raise from_none(ValueError('a')) == raise ValueError('a') from None"""
exc.__cause__ = None
exc.__suppress_context__ = True
return exc
# from reprlib 3.2.1
def recursive_repr(fillvalue='...'):
'Decorator to make a repr function return fillvalue for a recursive call'
def decorating_function(user_function):
repr_running = set()
def wrapper(self):
key = id(self), get_ident()
if key in repr_running:
return fillvalue
repr_running.add(key)
try:
result = user_function(self)
finally:
repr_running.discard(key)
return result
# Can't use functools.wraps() here because of bootstrap issues
wrapper.__module__ = getattr(user_function, '__module__')
wrapper.__doc__ = getattr(user_function, '__doc__')
wrapper.__name__ = getattr(user_function, '__name__')
wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
return wrapper
return decorating_function
# from collections 3.2.1
class _ChainMap(MutableMapping):
''' A ChainMap groups multiple dicts (or other mappings) together
to create a single, updateable view.
The underlying mappings are stored in a list. That list is public and can
be accessed or updated using the *maps* attribute. There is no other state.
Lookups search the underlying mappings successively until a key is found.
In contrast, writes, updates, and deletions only operate on the first
mapping.
'''
def __init__(self, *maps):
'''Initialize a ChainMap by setting *maps* to the given mappings.
If no mappings are provided, a single empty dictionary is used.
'''
self.maps = list(maps) or [{}] # always at least one map
def __missing__(self, key):
raise KeyError(key)
def __getitem__(self, key):
for mapping in self.maps:
try:
# can't use 'key in mapping' with defaultdict
return mapping[key]
except KeyError:
pass
# support subclasses that define __missing__
return self.__missing__(key)
def get(self, key, default=None):
return self[key] if key in self else default
def __len__(self):
# reuses stored hash values if possible
return len(set().union(*self.maps))
def __iter__(self):
return iter(set().union(*self.maps))
def __contains__(self, key):
return any(key in m for m in self.maps)
@recursive_repr()
def __repr__(self):
return '{0.__class__.__name__}({1})'.format(
self, ', '.join(map(repr, self.maps))
)
@classmethod
def fromkeys(cls, iterable, *args):
'Create a ChainMap with a single dict created from the iterable.'
return cls(dict.fromkeys(iterable, *args))
def copy(self):
"""
New ChainMap or subclass with a new copy of
maps[0] and refs to maps[1:]
"""
return self.__class__(self.maps[0].copy(), *self.maps[1:])
__copy__ = copy
def new_child(self): # like Django's Context.push()
'New ChainMap with a new dict followed by all previous maps.'
return self.__class__({}, *self.maps)
@property
def parents(self): # like Django's Context.pop()
'New ChainMap from maps[1:].'
return self.__class__(*self.maps[1:])
def __setitem__(self, key, value):
self.maps[0][key] = value
def __delitem__(self, key):
try:
del self.maps[0][key]
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
def popitem(self):
"""
Remove and return an item pair from maps[0].
Raise KeyError if maps[0] is empty.
"""
try:
return self.maps[0].popitem()
except KeyError:
raise KeyError('No keys found in the first mapping.')
def pop(self, key, *args):
"""
Remove *key* from maps[0] and return its value.
Raise KeyError if *key* not in maps[0].
"""
try:
return self.maps[0].pop(key, *args)
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
def clear(self):
'Clear maps[0], leaving maps[1:] intact.'
self.maps[0].clear()
try:
from collections import ChainMap
except ImportError:
ChainMap = _ChainMap
_ABC = getattr(
abc,
'ABC',
# Python 3.3 compatibility
abc.ABCMeta(native_str('__ABC'), (object,), dict(__metaclass__=abc.ABCMeta)),
)
class _PathLike(_ABC):
"""Abstract base class for implementing the file system path protocol."""
@abc.abstractmethod
def __fspath__(self):
"""Return the file system path representation of the object."""
raise NotImplementedError
@classmethod
def __subclasshook__(cls, subclass):
return bool(
hasattr(subclass, '__fspath__')
# workaround for Python 3.5
or pathlib
and issubclass(subclass, pathlib.Path)
)
PathLike = getattr(os, 'PathLike', _PathLike)
def _fspath(path):
"""Return the path representation of a path-like object.
If str or bytes is passed in, it is returned unchanged. Otherwise the
os.PathLike interface is used to get the path representation. If the
path representation is not str or bytes, TypeError is raised. If the
provided path is not str, bytes, or os.PathLike, TypeError is raised.
"""
if isinstance(path, (str, bytes)):
return path
if not hasattr(path, '__fspath__') and isinstance(path, pathlib.Path):
# workaround for Python 3.5
return str(path)
# Work from the object's type to match method resolution of other magic
# methods.
path_type = type(path)
try:
path_repr = path_type.__fspath__(path)
except AttributeError:
if hasattr(path_type, '__fspath__'):
raise
else:
raise TypeError(
"expected str, bytes or os.PathLike object, "
"not " + path_type.__name__
)
if isinstance(path_repr, (str, bytes)):
return path_repr
else:
raise TypeError(
"expected {}.__fspath__() to return str or bytes, "
"not {}".format(path_type.__name__, type(path_repr).__name__)
)
fspath = getattr(os, 'fspath', _fspath)
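This whole backport can go because the py3 stdlib provides each piece natively; a quick sanity sketch:

import os
import pathlib
from collections import ChainMap

assert ChainMap({'timeout': 5}, {'timeout': 30})['timeout'] == 5  # first mapping wins
p = pathlib.Path('lib') / 'x.cfg'
assert os.fspath(p) == str(p)       # PEP 519 path protocol, no _fspath shim needed
assert isinstance(p, os.PathLike)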


@@ -1,196 +0,0 @@
from __future__ import absolute_import
import functools
from collections import namedtuple
from threading import RLock
_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"])
@functools.wraps(functools.update_wrapper)
def update_wrapper(
wrapper,
wrapped,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES,
):
"""
Patch two bugs in functools.update_wrapper.
"""
# workaround for http://bugs.python.org/issue3445
assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
# workaround for https://bugs.python.org/issue17482
wrapper.__wrapped__ = wrapped
return wrapper
class _HashedSeq(list):
__slots__ = 'hashvalue'
def __init__(self, tup, hash=hash):
self[:] = tup
self.hashvalue = hash(tup)
def __hash__(self):
return self.hashvalue
def _make_key(
args,
kwds,
typed,
kwd_mark=(object(),),
fasttypes=set([int, str, frozenset, type(None)]),
sorted=sorted,
tuple=tuple,
type=type,
len=len,
):
'Make a cache key from optionally typed positional and keyword arguments'
key = args
if kwds:
sorted_items = sorted(kwds.items())
key += kwd_mark
for item in sorted_items:
key += item
if typed:
key += tuple(type(v) for v in args)
if kwds:
key += tuple(type(v) for k, v in sorted_items)
elif len(key) == 1 and type(key[0]) in fasttypes:
return key[0]
return _HashedSeq(key)
def lru_cache(maxsize=100, typed=False): # noqa: C901
"""Least-recently-used cache decorator.
If *maxsize* is set to None, the LRU features are disabled and the cache
can grow without bound.
If *typed* is True, arguments of different types will be cached separately.
For example, f(3.0) and f(3) will be treated as distinct calls with
distinct results.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, maxsize, currsize) with
f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
"""
# Users should only access the lru_cache through its public API:
# cache_info, cache_clear, and f.__wrapped__
# The internals of the lru_cache are encapsulated for thread safety and
# to allow the implementation to change (including a possible C version).
def decorating_function(user_function):
cache = dict()
stats = [0, 0] # make statistics updateable non-locally
HITS, MISSES = 0, 1 # names for the stats fields
make_key = _make_key
cache_get = cache.get # bound method to lookup key or return None
_len = len # localize the global len() function
lock = RLock() # because linkedlist updates aren't threadsafe
root = [] # root of the circular doubly linked list
root[:] = [root, root, None, None] # initialize by pointing to self
nonlocal_root = [root] # make updateable non-locally
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
if maxsize == 0:
def wrapper(*args, **kwds):
# no caching, just do a statistics update after a successful call
result = user_function(*args, **kwds)
stats[MISSES] += 1
return result
elif maxsize is None:
def wrapper(*args, **kwds):
# simple caching without ordering or size limit
key = make_key(args, kwds, typed)
result = cache_get(
key, root
) # root used here as a unique not-found sentinel
if result is not root:
stats[HITS] += 1
return result
result = user_function(*args, **kwds)
cache[key] = result
stats[MISSES] += 1
return result
else:
def wrapper(*args, **kwds):
# size limited caching that tracks accesses by recency
key = make_key(args, kwds, typed) if kwds or typed else args
with lock:
link = cache_get(key)
if link is not None:
# record recent use of the key by moving it
# to the front of the list
(root,) = nonlocal_root
link_prev, link_next, key, result = link
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
last = root[PREV]
last[NEXT] = root[PREV] = link
link[PREV] = last
link[NEXT] = root
stats[HITS] += 1
return result
result = user_function(*args, **kwds)
with lock:
(root,) = nonlocal_root
if key in cache:
# getting here means that this same key was added to the
# cache while the lock was released. since the link
# update is already done, we need only return the
# computed result and update the count of misses.
pass
elif _len(cache) >= maxsize:
# use the old root to store the new key and result
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
# empty the oldest link and make it the new root
root = nonlocal_root[0] = oldroot[NEXT]
oldkey = root[KEY]
root[KEY] = root[RESULT] = None
# now update the cache dictionary for the new links
del cache[oldkey]
cache[key] = oldroot
else:
# put result in a new link at the front of the list
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
stats[MISSES] += 1
return result
def cache_info():
"""Report cache statistics"""
with lock:
return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
def cache_clear():
"""Clear the cache and cache statistics"""
with lock:
cache.clear()
root = nonlocal_root[0]
root[:] = [root, root, None, None]
stats[:] = [0, 0]
wrapper.__wrapped__ = user_function
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return update_wrapper(wrapper, user_function)
return decorating_function
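Similarly, this vendored cache is redundant on py3: functools.lru_cache (stdlib since 3.2) exposes the same cache_info/cache_clear surface. A minimal sketch:

from functools import lru_cache

@lru_cache(maxsize=100)
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

assert fib(30) == 832040
assert fib.cache_info().maxsize == 100  # CacheInfo(hits, misses, maxsize, currsize)
fib.cache_clear()
assert fib.cache_info().currsize == 0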


@@ -1,204 +0,0 @@
"""The match_hostname() function from Python 3.7.0, essential when using SSL."""
import sys
import socket as _socket
try:
# Divergence: Python-3.7+'s _ssl has this exception type but older Pythons do not
from _ssl import SSLCertVerificationError
CertificateError = SSLCertVerificationError
except:
class CertificateError(ValueError):
pass
__version__ = '3.7.0.1'
# Divergence: Added to deal with ipaddress as bytes on python2
def _to_text(obj):
if isinstance(obj, str) and sys.version_info < (3,):
obj = unicode(obj, encoding='ascii', errors='strict')
elif sys.version_info >= (3,) and isinstance(obj, bytes):
obj = str(obj, encoding='ascii', errors='strict')
return obj
def _to_bytes(obj):
if isinstance(obj, str) and sys.version_info >= (3,):
obj = bytes(obj, encoding='ascii', errors='strict')
elif sys.version_info < (3,) and isinstance(obj, unicode):
obj = obj.encode('ascii', 'strict')
return obj
def _dnsname_match(dn, hostname):
"""Matching according to RFC 6125, section 6.4.3
- Hostnames are compared lower case.
- For IDNA, both dn and hostname must be encoded as IDN A-label (ACE).
- Partial wildcards like 'www*.example.org', multiple wildcards, sole
wildcard or wildcards in labels other than the left-most label are not
supported and a CertificateError is raised.
- A wildcard must match at least one character.
"""
if not dn:
return False
wildcards = dn.count('*')
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
if wildcards > 1:
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"too many wildcards in certificate DNS name: %s" % repr(dn))
dn_leftmost, sep, dn_remainder = dn.partition('.')
if '*' in dn_remainder:
# Only match wildcard in leftmost segment.
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"wildcard can only be present in the leftmost label: "
"%s." % repr(dn))
if not sep:
# no right side
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"sole wildcard without additional labels are not support: "
"%s." % repr(dn))
if dn_leftmost != '*':
# no partial wildcard matching
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"partial wildcards in leftmost label are not supported: "
"%s." % repr(dn))
hostname_leftmost, sep, hostname_remainder = hostname.partition('.')
if not hostname_leftmost or not sep:
# wildcard must match at least one char
return False
return dn_remainder.lower() == hostname_remainder.lower()
def _inet_paton(ipname):
"""Try to convert an IP address to packed binary form
Supports IPv4 addresses on all platforms and IPv6 on platforms with IPv6
support.
"""
# inet_aton() also accepts strings like '1'
# Divergence: We make sure we have native string type for all python versions
try:
b_ipname = _to_bytes(ipname)
except UnicodeError:
raise ValueError("%s must be an all-ascii string." % repr(ipname))
# Set ipname in native string format
if sys.version_info < (3,):
n_ipname = b_ipname
else:
n_ipname = ipname
if n_ipname.count('.') == 3:
try:
return _socket.inet_aton(n_ipname)
# Divergence: OSError on late python3. socket.error earlier.
# Null bytes generate ValueError on python3(we want to raise
# ValueError anyway), TypeError # earlier
except (OSError, _socket.error, TypeError):
pass
try:
return _socket.inet_pton(_socket.AF_INET6, n_ipname)
# Divergence: OSError on late python3. socket.error earlier.
# Null bytes generate ValueError on python3(we want to raise
# ValueError anyway), TypeError # earlier
except (OSError, _socket.error, TypeError):
# Divergence .format() to percent formatting for Python < 2.6
raise ValueError("%s is neither an IPv4 nor an IP6 "
"address." % repr(ipname))
except AttributeError:
# AF_INET6 not available
pass
# Divergence .format() to percent formatting for Python < 2.6
raise ValueError("%s is not an IPv4 address." % repr(ipname))
def _ipaddress_match(ipname, host_ip):
"""Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
"""
# OpenSSL may add a trailing newline to a subjectAltName's IP address
ip = _inet_paton(ipname.rstrip())
return ip == host_ip
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed.
The function matches IP addresses rather than dNSNames if hostname is a
valid ipaddress string. IPv4 addresses are supported on all platforms.
IPv6 addresses are supported on platforms with IPv6 support (AF_INET6
and inet_pton).
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate, match_hostname needs a "
"SSL socket or SSL context with either "
"CERT_OPTIONAL or CERT_REQUIRED")
try:
# Divergence: Deal with hostname as bytes
host_ip = _inet_paton(_to_text(hostname))
except ValueError:
# Not an IP address (common case)
host_ip = None
except UnicodeError:
# Divergence: Deal with hostname as byte strings.
# IP addresses should be all ascii, so we consider it not
# an IP address if this fails
host_ip = None
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if host_ip is None and _dnsname_match(value, hostname):
return
dnsnames.append(value)
elif key == 'IP Address':
if host_ip is not None and _ipaddress_match(value, host_ip):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")


@@ -1,216 +0,0 @@
"""
Patch recently added ABCs into the standard lib module
``collections.abc`` (Py3) or ``collections`` (Py2).
Usage::
import backports_abc
backports_abc.patch()
or::
try:
from collections.abc import Generator
except ImportError:
from backports_abc import Generator
"""
try:
import collections.abc as _collections_abc
except ImportError:
import collections as _collections_abc
def get_mro(cls):
try:
return cls.__mro__
except AttributeError:
return old_style_mro(cls)
def old_style_mro(cls):
yield cls
for base in cls.__bases__:
for c in old_style_mro(base):
yield c
def mk_gen():
from abc import abstractmethod
required_methods = (
'__iter__', '__next__' if hasattr(iter(()), '__next__') else 'next',
'send', 'throw', 'close')
class Generator(_collections_abc.Iterator):
__slots__ = ()
if '__next__' in required_methods:
def __next__(self):
return self.send(None)
else:
def next(self):
return self.send(None)
@abstractmethod
def send(self, value):
raise StopIteration
@abstractmethod
def throw(self, typ, val=None, tb=None):
if val is None:
if tb is None:
raise typ
val = typ()
if tb is not None:
val = val.with_traceback(tb)
raise val
def close(self):
try:
self.throw(GeneratorExit)
except (GeneratorExit, StopIteration):
pass
else:
raise RuntimeError('generator ignored GeneratorExit')
@classmethod
def __subclasshook__(cls, C):
if cls is Generator:
mro = get_mro(C)
for method in required_methods:
for base in mro:
if method in base.__dict__:
break
else:
return NotImplemented
return True
return NotImplemented
generator = type((lambda: (yield))())
Generator.register(generator)
return Generator
def mk_awaitable():
from abc import abstractmethod, ABCMeta
@abstractmethod
def __await__(self):
yield
@classmethod
def __subclasshook__(cls, C):
if cls is Awaitable:
for B in get_mro(C):
if '__await__' in B.__dict__:
if B.__dict__['__await__']:
return True
break
return NotImplemented
# calling metaclass directly as syntax differs in Py2/Py3
Awaitable = ABCMeta('Awaitable', (), {
'__slots__': (),
'__await__': __await__,
'__subclasshook__': __subclasshook__,
})
return Awaitable
def mk_coroutine():
from abc import abstractmethod
class Coroutine(Awaitable):
__slots__ = ()
@abstractmethod
def send(self, value):
"""Send a value into the coroutine.
Return next yielded value or raise StopIteration.
"""
raise StopIteration
@abstractmethod
def throw(self, typ, val=None, tb=None):
"""Raise an exception in the coroutine.
Return next yielded value or raise StopIteration.
"""
if val is None:
if tb is None:
raise typ
val = typ()
if tb is not None:
val = val.with_traceback(tb)
raise val
def close(self):
"""Raise GeneratorExit inside coroutine.
"""
try:
self.throw(GeneratorExit)
except (GeneratorExit, StopIteration):
pass
else:
raise RuntimeError('coroutine ignored GeneratorExit')
@classmethod
def __subclasshook__(cls, C):
if cls is Coroutine:
mro = get_mro(C)
for method in ('__await__', 'send', 'throw', 'close'):
for base in mro:
if method in base.__dict__:
break
else:
return NotImplemented
return True
return NotImplemented
return Coroutine
###
# make all ABCs available in this module
try:
Generator = _collections_abc.Generator
except AttributeError:
Generator = mk_gen()
try:
Awaitable = _collections_abc.Awaitable
except AttributeError:
Awaitable = mk_awaitable()
try:
Coroutine = _collections_abc.Coroutine
except AttributeError:
Coroutine = mk_coroutine()
try:
from inspect import isawaitable
except ImportError:
def isawaitable(obj):
return isinstance(obj, Awaitable)
###
# allow patching the stdlib
PATCHED = {}
def patch(patch_inspect=True):
"""
Main entry point for patching the ``collections.abc`` and ``inspect``
standard library modules.
"""
PATCHED['collections.abc.Generator'] = _collections_abc.Generator = Generator
PATCHED['collections.abc.Coroutine'] = _collections_abc.Coroutine = Coroutine
PATCHED['collections.abc.Awaitable'] = _collections_abc.Awaitable = Awaitable
if patch_inspect:
import inspect
PATCHED['inspect.isawaitable'] = inspect.isawaitable = isawaitable
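
For context on what this removed shim provided, a minimal usage sketch, assuming py3 where collections.abc already ships these ABCs:

import backports_abc
backports_abc.patch()  # installs Generator/Coroutine/Awaitable and inspect.isawaitable

from collections.abc import Generator

def gen():
    yield 1

assert isinstance(gen(), Generator)  # generator objects satisfy the ABC via __subclasshook__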

View file

@ -16,20 +16,25 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import random
from six import moves
# Browser apps represented in data
# noinspection PyUnresolvedReferences
__all__ = ['chrome', 'opera', 'firefox', 'safari', 'ie']
# noinspection PyUnreachableCode
if False:
from typing import AnyStr
def get_ua():
# type: (...) -> AnyStr
"""
Return a random browser user agent string
:return: A browser user agent string
:rtype: String
:return: A browser user agent
"""
ua = []
for x in moves.xrange(1, 10):
for x in range(1, 10):
ua += [random.choice(browser_ua.get(random.choice(__all__)))]
return random.choice(ua)
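
A hypothetical call site for get_ua(); the header dict is the common convention for HTTP clients, not something this diff adds:

headers = {'User-Agent': get_ua()}  # one of ten randomly sampled browser agents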

View file

@ -1,5 +1,6 @@
import re
from bs4 import BeautifulSoup, SoupStrainer
from bs4 import BeautifulSoup
from bs4.element import SoupStrainer
from six import iteritems
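
A minimal sketch of the relocated SoupStrainer import in use; the markup and tag choice are illustrative only:

from bs4 import BeautifulSoup
from bs4.element import SoupStrainer

# parse_only restricts the parsed tree to matching tags, here just anchors
soup = BeautifulSoup('<a href="/x">x</a><p>y</p>', 'html.parser',
                     parse_only=SoupStrainer('a'))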

View file

@ -21,7 +21,7 @@ import string
import re
import struct
from six import string_types, integer_types
from _23 import decode_str, list_items
from _23 import decode_str
__all__ = ['resolve']
@ -845,7 +845,7 @@ FOURCC = {
}
# make it foolproof
for code, value in list_items(FOURCC):
for code, value in list(FOURCC.items()):
if not code.upper() in FOURCC:
FOURCC[code.upper()] = value
if code.endswith(' '):
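
The list(...) snapshot above is the py3-safe way to mutate a dict while iterating it; a minimal sketch with illustrative names:

d = {'abc ': 1}
for code, value in list(d.items()):  # copy first: inserting keys during a live
    d[code.upper()] = value          # dict iteration raises RuntimeError on py3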

View file

@ -14,8 +14,6 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from six import PY2, string_types
# noinspection PyUnreachableCode
if False:
from typing import AnyStr
@ -28,6 +26,7 @@ def ex(e):
return str(e)
# noinspection DuplicatedCode
class SickGearException(Exception):
"""Generic SickGear Exception - should never be thrown, only subclassed"""

File diff suppressed because it is too large

View file

@ -36,8 +36,6 @@ from .rpc import Method
from .torrent import Torrent, methods as torrent_methods
from .tracker import Tracker, methods as tracker_methods
from _23 import filter_iter, filter_list, map_list
__version__ = '0.2.10'
__author__ = 'Chris Lucas'
@ -184,15 +182,16 @@ class RTorrent(object):
@todo: add validity check for specified view
"""
self.torrents = []
retriever_methods = filter_list(lambda m: m.is_retriever() and m.is_available(self), torrent_methods)
retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self), torrent_methods))
mc = rpc.Multicall(self)
if self.method_exists('d.multicall2'):
mc.add('d.multicall2', '', view, 'd.hash=',
*map_list(lambda m2: ((getattr(m2, 'aliases') or [''])[-1] or m2.rpc_call) + '=', retriever_methods))
*list(map(lambda m2: ((getattr(m2, 'aliases') or [''])[-1] or m2.rpc_call) + '=',
retriever_methods)))
else:
mc.add('d.multicall', view, 'd.get_hash=',
*map_list(lambda m1: m1.rpc_call + '=', retriever_methods))
*list(map(lambda m1: m1.rpc_call + '=', retriever_methods)))
results = mc.call()[0] # only sent one call, only need first result
@ -240,7 +239,7 @@ class RTorrent(object):
try:
call, arg = x.split('=')
method = rpc.find_method(call)
method_name = next(filter_iter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases))
method_name = next(filter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases))
param += ['%s=%s' % (method_name, arg)]
except (BaseException, Exception):
pass
@ -267,7 +266,7 @@ class RTorrent(object):
max_retries = 10
while max_retries:
try:
t = next(filter_iter(lambda td: td.info_hash.upper() == info_hash, self.get_torrents()))
t = next(filter(lambda td: td.info_hash.upper() == info_hash, self.get_torrents()))
break
except (BaseException, Exception):
time.sleep(self.request_interval)
@ -326,7 +325,7 @@ class RTorrent(object):
if verify_load:
while verify_retries:
try:
t = next(filter_iter(lambda td: td.info_hash == info_hash, self.get_torrents()))
t = next(filter(lambda td: td.info_hash == info_hash, self.get_torrents()))
break
except (BaseException, Exception):
time.sleep(self.request_interval)
@ -437,7 +436,7 @@ class RTorrent(object):
method = rpc.find_method('d.get_local_id')
result = True
try:
func = next(filter_iter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases))
func = next(filter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases))
getattr(self.get_connection(), func)(info_hash)
except (BaseException, Exception):
result = False
@ -466,7 +465,7 @@ class RTorrent(object):
"""
mc = rpc.Multicall(self)
for method in filter_iter(lambda m: m.is_retriever() and m.is_available(self), methods):
for method in filter(lambda m: m.is_retriever() and m.is_available(self), methods):
mc.add(method)
mc.call()
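
Assumed equivalences behind the mechanical replacements in this and the following rtorrent files, where the _23 helpers were this repo's py2/py3 shims:

# filter_list(f, xs) -> list(filter(f, xs))
# filter_iter(f, xs) -> filter(f, xs)        (lazy iterator on py3)
# map_list(f, xs)    -> list(map(f, xs))
evens = list(filter(lambda n: 0 == n % 2, range(6)))  # [0, 2, 4]
doubled = list(map(lambda n: 2 * n, evens))           # [0, 4, 8]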

View file

@ -22,8 +22,6 @@ from . import rpc
from .common import safe_repr
from .rpc import Method
from _23 import filter_iter
class File(object):
"""Represents an individual file within a L{Torrent} instance."""
@ -48,7 +46,7 @@ class File(object):
"""
mc = rpc.Multicall(self)
for method in filter_iter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods):
for method in filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods):
mc.add(method, self.rpc_id)
mc.call()

View file

@ -21,8 +21,6 @@
from . import rpc
from .rpc import Method
from _23 import filter_iter
class Group(object):
__name__ = 'Group'
@ -72,7 +70,7 @@ class Group(object):
def _get_method(self, *choices):
try:
return next(filter_iter(lambda method: self._rt_obj.method_exists(method), choices))
return next(filter(lambda method: self._rt_obj.method_exists(method), choices))
except (BaseException, Exception):
pass

View file

@ -27,8 +27,6 @@ import re
import rtorrent
from _23 import filter_iter, map_list
def get_varname(rpc_call):
"""Transform rpc method into variable name.
@ -94,8 +92,8 @@ class Method(object):
if rt_obj.get_client_version_tuple() >= self.min_version:
try:
self.varname = get_varname(next(filter_iter(lambda f: rt_obj.method_exists(f),
(self.rpc_call,) + tuple(getattr(self, 'aliases', '')))))
self.varname = get_varname(next(filter(lambda f: rt_obj.method_exists(f),
(self.rpc_call,) + tuple(getattr(self, 'aliases', '')))))
return True
except (BaseException, Exception):
pass
@ -162,7 +160,7 @@ class Multicall(object):
getattr(xmc, rpc_call)(*args)
try:
results = tuple(next(filter_iter(lambda x: isinstance(x, list), xmc().results)))
results = tuple(next(filter(lambda x: isinstance(x, list), xmc().results)))
except (BaseException, Exception):
return [[]]
@ -216,8 +214,8 @@ def find_method(rpc_call):
"""Return L{Method} instance associated with given RPC call"""
try:
rpc_call = rpc_call.lower()
return next(filter_iter(lambda m: rpc_call in map_list(
lambda n: n.lower(), [m.rpc_call] + list(getattr(m, 'aliases', []))),
return next(filter(lambda m: rpc_call in list(map(
lambda n: n.lower(), [m.rpc_call] + list(getattr(m, 'aliases', [])))),
rtorrent.methods + rtorrent.torrent.methods +
rtorrent.file.methods + rtorrent.tracker.methods + rtorrent.peer.methods))
except (BaseException, Exception):

View file

@ -25,8 +25,6 @@ from .peer import Peer, methods as peer_methods
from .rpc import Method
from .tracker import Tracker, methods as tracker_methods
from _23 import filter_iter, filter_list
class Torrent(object):
"""Represents an individual torrent within a L{RTorrent} instance."""
@ -70,7 +68,7 @@ class Torrent(object):
@note: also assigns return value to self.peers
"""
self.peers = []
retriever_methods = filter_list(lambda m: m.is_retriever() and m.is_available(self._rt_obj), peer_methods)
retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), peer_methods))
mc = rpc.Multicall(self)
# need to leave 2nd arg empty (dunno why)
@ -97,7 +95,7 @@ class Torrent(object):
@note: also assigns return value to self.trackers
"""
self.trackers = []
retriever_methods = filter_list(lambda m: m.is_retriever() and m.is_available(self._rt_obj), tracker_methods)
retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), tracker_methods))
mc = rpc.Multicall(self)
# need to leave 2nd arg empty (dunno why)
@ -125,7 +123,7 @@ class Torrent(object):
"""
self.files = []
retriever_methods = filter_list(lambda m: m.is_retriever() and m.is_available(self._rt_obj), file_methods)
retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), file_methods))
mc = rpc.Multicall(self)
# 2nd arg can be anything, but it'll return all files in torrent
@ -155,7 +153,7 @@ class Torrent(object):
def _get_method(self, *choices):
try:
return next(filter_iter(lambda method: self._rt_obj.method_exists(method), choices))
return next(filter(lambda method: self._rt_obj.method_exists(method), choices))
except (BaseException, Exception):
pass
@ -276,7 +274,7 @@ class Torrent(object):
"""
mc = rpc.Multicall(self)
for method in filter_iter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods):
for method in filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods):
mc.add(method, self.rpc_id)
mc.call()

View file

@ -22,8 +22,6 @@ from . import rpc
from .common import safe_repr
from .rpc import Method
from _23 import filter_iter
class Tracker(object):
"""Represents an individual tracker within a L{Torrent} instance."""
@ -64,7 +62,7 @@ class Tracker(object):
"""
mc = rpc.Multicall(self)
for method in filter_iter(lambda fx: fx.is_retriever() and fx.is_available(self._rt_obj), methods):
for method in filter(lambda fx: fx.is_retriever() and fx.is_available(self._rt_obj), methods):
mc.add(method, self.rpc_id)
mc.call()

View file

@ -1,27 +0,0 @@
Copyright (c) 2012, Ben Hoyt
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Ben Hoyt nor the names of its contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -1,697 +0,0 @@
"""scandir, a better directory iterator and faster os.walk(), now in the Python 3.5 stdlib
scandir() is a generator version of os.listdir() that returns an
iterator over files in a directory, and also exposes the extra
information most OSes provide while iterating files in a directory
(such as type and stat information).
This module also includes a version of os.walk() that uses scandir()
to speed it up significantly.
See README.md or https://github.com/benhoyt/scandir for rationale and
docs, or read PEP 471 (https://www.python.org/dev/peps/pep-0471/) for
more details on its inclusion into Python 3.5
scandir is released under the new BSD 3-clause license. See
LICENSE.txt for the full license text.
"""
from __future__ import division
from errno import ENOENT
from os import listdir, lstat, stat, strerror
from os.path import join, islink
from stat import S_IFDIR, S_IFLNK, S_IFREG
import collections
import sys
try:
import _scandir
except ImportError:
_scandir = None
try:
import ctypes
except ImportError:
ctypes = None
if None is not _scandir and None is not ctypes and not getattr(_scandir, 'DirEntry', None):
import warnings
warnings.warn("scandir compiled _scandir C module is too old, using slow generic fallback")
_scandir = None
elif _scandir is None and ctypes is None:
import warnings
warnings.warn("scandir can't find the compiled _scandir C module or ctypes, using slow generic fallback")
__version__ = '1.10.0'
__all__ = ['scandir', 'walk']
# Windows FILE_ATTRIBUTE constants for interpreting the
# FIND_DATA.dwFileAttributes member
FILE_ATTRIBUTE_ARCHIVE = 32
FILE_ATTRIBUTE_COMPRESSED = 2048
FILE_ATTRIBUTE_DEVICE = 64
FILE_ATTRIBUTE_DIRECTORY = 16
FILE_ATTRIBUTE_ENCRYPTED = 16384
FILE_ATTRIBUTE_HIDDEN = 2
FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768
FILE_ATTRIBUTE_NORMAL = 128
FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192
FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072
FILE_ATTRIBUTE_OFFLINE = 4096
FILE_ATTRIBUTE_READONLY = 1
FILE_ATTRIBUTE_REPARSE_POINT = 1024
FILE_ATTRIBUTE_SPARSE_FILE = 512
FILE_ATTRIBUTE_SYSTEM = 4
FILE_ATTRIBUTE_TEMPORARY = 256
FILE_ATTRIBUTE_VIRTUAL = 65536
IS_PY3 = sys.version_info >= (3, 0)
if IS_PY3:
unicode = str # Because Python <= 3.2 doesn't have u'unicode' syntax
class GenericDirEntry(object):
__slots__ = ('name', '_stat', '_lstat', '_scandir_path', '_path')
def __init__(self, scandir_path, name):
self._scandir_path = scandir_path
self.name = name
self._stat = None
self._lstat = None
self._path = None
@property
def path(self):
if self._path is None:
self._path = join(self._scandir_path, self.name)
return self._path
def stat(self, follow_symlinks=True):
if follow_symlinks:
if self._stat is None:
self._stat = stat(self.path)
return self._stat
else:
if self._lstat is None:
self._lstat = lstat(self.path)
return self._lstat
# The code duplication below is intentional: this is for slightly
# better performance on systems that fall back to GenericDirEntry.
# It avoids an additional attribute lookup and method call, which
# are relatively slow on CPython.
def is_dir(self, follow_symlinks=True):
try:
st = self.stat(follow_symlinks=follow_symlinks)
except OSError as e:
if e.errno != ENOENT:
raise
return False # Path doesn't exist or is a broken symlink
return st.st_mode & 0o170000 == S_IFDIR
def is_file(self, follow_symlinks=True):
try:
st = self.stat(follow_symlinks=follow_symlinks)
except OSError as e:
if e.errno != ENOENT:
raise
return False # Path doesn't exist or is a broken symlink
return st.st_mode & 0o170000 == S_IFREG
def is_symlink(self):
try:
st = self.stat(follow_symlinks=False)
except OSError as e:
if e.errno != ENOENT:
raise
return False # Path doesn't exist or is a broken symlink
return st.st_mode & 0o170000 == S_IFLNK
def inode(self):
st = self.stat(follow_symlinks=False)
return st.st_ino
def __str__(self):
return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
__repr__ = __str__
def _scandir_generic(path=unicode('.')):
"""Like os.listdir(), but yield DirEntry objects instead of returning
a list of names.
"""
for name in listdir(path):
yield GenericDirEntry(path, name)
if IS_PY3 and sys.platform == 'win32':
def scandir_generic(path=unicode('.')):
if isinstance(path, bytes):
raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
return _scandir_generic(path)
scandir_generic.__doc__ = _scandir_generic.__doc__
else:
scandir_generic = _scandir_generic
scandir_c = None
scandir_python = None
if sys.platform == 'win32':
if ctypes is not None:
from ctypes import wintypes
# Various constants from windows.h
INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value
ERROR_FILE_NOT_FOUND = 2
ERROR_NO_MORE_FILES = 18
IO_REPARSE_TAG_SYMLINK = 0xA000000C
# Number of seconds between 1601-01-01 and 1970-01-01
SECONDS_BETWEEN_EPOCHS = 11644473600
kernel32 = ctypes.windll.kernel32
# ctypes wrappers for (wide string versions of) FindFirstFile,
# FindNextFile, and FindClose
FindFirstFile = kernel32.FindFirstFileW
FindFirstFile.argtypes = [
wintypes.LPCWSTR,
ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
]
FindFirstFile.restype = wintypes.HANDLE
FindNextFile = kernel32.FindNextFileW
FindNextFile.argtypes = [
wintypes.HANDLE,
ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
]
FindNextFile.restype = wintypes.BOOL
FindClose = kernel32.FindClose
FindClose.argtypes = [wintypes.HANDLE]
FindClose.restype = wintypes.BOOL
Win32StatResult = collections.namedtuple('Win32StatResult', [
'st_mode',
'st_ino',
'st_dev',
'st_nlink',
'st_uid',
'st_gid',
'st_size',
'st_atime',
'st_mtime',
'st_ctime',
'st_atime_ns',
'st_mtime_ns',
'st_ctime_ns',
'st_file_attributes',
])
def filetime_to_time(filetime):
"""Convert Win32 FILETIME to time since Unix epoch in seconds."""
total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime
return total / 10000000 - SECONDS_BETWEEN_EPOCHS
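# Worked check: FILETIME counts 100 ns ticks since 1601-01-01, so
# 116444736000000000 / 10**7 == 11644473600 == SECONDS_BETWEEN_EPOCHS and
# filetime_to_time() maps that tick count to 0.0, the Unix epoch.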
def find_data_to_stat(data):
"""Convert Win32 FIND_DATA struct to stat_result."""
# First convert Win32 dwFileAttributes to st_mode
attributes = data.dwFileAttributes
st_mode = 0
if attributes & FILE_ATTRIBUTE_DIRECTORY:
st_mode |= S_IFDIR | 0o111
else:
st_mode |= S_IFREG
if attributes & FILE_ATTRIBUTE_READONLY:
st_mode |= 0o444
else:
st_mode |= 0o666
if (attributes & FILE_ATTRIBUTE_REPARSE_POINT and
data.dwReserved0 == IO_REPARSE_TAG_SYMLINK):
st_mode ^= st_mode & 0o170000
st_mode |= S_IFLNK
st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow
st_atime = filetime_to_time(data.ftLastAccessTime)
st_mtime = filetime_to_time(data.ftLastWriteTime)
st_ctime = filetime_to_time(data.ftCreationTime)
# Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev,
# st_nlink, st_uid, st_gid
return Win32StatResult(st_mode, 0, 0, 0, 0, 0, st_size,
st_atime, st_mtime, st_ctime,
int(st_atime * 1000000000),
int(st_mtime * 1000000000),
int(st_ctime * 1000000000),
attributes)
class Win32DirEntryPython(object):
__slots__ = ('name', '_stat', '_lstat', '_find_data', '_scandir_path', '_path', '_inode')
def __init__(self, scandir_path, name, find_data):
self._scandir_path = scandir_path
self.name = name
self._stat = None
self._lstat = None
self._find_data = find_data
self._path = None
self._inode = None
@property
def path(self):
if self._path is None:
self._path = join(self._scandir_path, self.name)
return self._path
def stat(self, follow_symlinks=True):
if follow_symlinks:
if self._stat is None:
if self.is_symlink():
# It's a symlink, call link-following stat()
self._stat = stat(self.path)
else:
# Not a symlink, stat is same as lstat value
if self._lstat is None:
self._lstat = find_data_to_stat(self._find_data)
self._stat = self._lstat
return self._stat
else:
if self._lstat is None:
# Lazily convert to stat object, because it's slow
# in Python, and often we only need is_dir() etc
self._lstat = find_data_to_stat(self._find_data)
return self._lstat
def is_dir(self, follow_symlinks=True):
is_symlink = self.is_symlink()
if follow_symlinks and is_symlink:
try:
return self.stat().st_mode & 0o170000 == S_IFDIR
except OSError as e:
if e.errno != ENOENT:
raise
return False
elif is_symlink:
return False
else:
return (self._find_data.dwFileAttributes &
FILE_ATTRIBUTE_DIRECTORY != 0)
def is_file(self, follow_symlinks=True):
is_symlink = self.is_symlink()
if follow_symlinks and is_symlink:
try:
return self.stat().st_mode & 0o170000 == S_IFREG
except OSError as e:
if e.errno != ENOENT:
raise
return False
elif is_symlink:
return False
else:
return (self._find_data.dwFileAttributes &
FILE_ATTRIBUTE_DIRECTORY == 0)
def is_symlink(self):
return (self._find_data.dwFileAttributes &
FILE_ATTRIBUTE_REPARSE_POINT != 0 and
self._find_data.dwReserved0 == IO_REPARSE_TAG_SYMLINK)
def inode(self):
if self._inode is None:
self._inode = lstat(self.path).st_ino
return self._inode
def __str__(self):
return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
__repr__ = __str__
def win_error(error, filename):
exc = WindowsError(error, ctypes.FormatError(error))
exc.filename = filename
return exc
def _scandir_python(path=unicode('.')):
"""Like os.listdir(), but yield DirEntry objects instead of returning
a list of names.
"""
# Call FindFirstFile and handle errors
if isinstance(path, bytes):
is_bytes = True
filename = join(path.decode('mbcs', 'strict'), '*.*')
else:
is_bytes = False
filename = join(path, '*.*')
data = wintypes.WIN32_FIND_DATAW()
data_p = ctypes.byref(data)
handle = FindFirstFile(filename, data_p)
if handle == INVALID_HANDLE_VALUE:
error = ctypes.GetLastError()
if error == ERROR_FILE_NOT_FOUND:
# No files, don't yield anything
return
raise win_error(error, path)
# Call FindNextFile in a loop, stopping when no more files
try:
while True:
# Skip '.' and '..' (current and parent directory), but
# otherwise yield a DirEntry object for it
name = data.cFileName
if name not in ('.', '..'):
if is_bytes:
name = name.encode('mbcs', 'replace')
yield Win32DirEntryPython(path, name, data)
data = wintypes.WIN32_FIND_DATAW()
data_p = ctypes.byref(data)
success = FindNextFile(handle, data_p)
if not success:
error = ctypes.GetLastError()
if error == ERROR_NO_MORE_FILES:
break
raise win_error(error, path)
finally:
if not FindClose(handle):
raise win_error(ctypes.GetLastError(), path)
if IS_PY3:
def scandir_python(path=unicode('.')):
if isinstance(path, bytes):
raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
return _scandir_python(path)
scandir_python.__doc__ = _scandir_python.__doc__
else:
scandir_python = _scandir_python
if _scandir is not None:
scandir_c = _scandir.scandir
DirEntry_c = _scandir.DirEntry
if _scandir is not None:
scandir = scandir_c
DirEntry = DirEntry_c
elif ctypes is not None:
scandir = scandir_python
DirEntry = Win32DirEntryPython
else:
scandir = scandir_generic
DirEntry = GenericDirEntry
# Linux, OS X, and BSD implementation
elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
have_dirent_d_type = (sys.platform != 'sunos5')
if ctypes is not None and have_dirent_d_type:
import ctypes.util
DIR_p = ctypes.c_void_p
# Rather annoying how the dirent struct is slightly different on each
# platform. The only fields we care about are d_name and d_type.
class Dirent(ctypes.Structure):
if sys.platform.startswith('linux'):
_fields_ = (
('d_ino', ctypes.c_ulong),
('d_off', ctypes.c_long),
('d_reclen', ctypes.c_ushort),
('d_type', ctypes.c_byte),
('d_name', ctypes.c_char * 256),
)
elif 'openbsd' in sys.platform:
_fields_ = (
('d_ino', ctypes.c_uint64),
('d_off', ctypes.c_uint64),
('d_reclen', ctypes.c_uint16),
('d_type', ctypes.c_uint8),
('d_namlen', ctypes.c_uint8),
('__d_padding', ctypes.c_uint8 * 4),
('d_name', ctypes.c_char * 256),
)
else:
_fields_ = (
('d_ino', ctypes.c_uint32), # must be uint32, not ulong
('d_reclen', ctypes.c_ushort),
('d_type', ctypes.c_byte),
('d_namlen', ctypes.c_byte),
('d_name', ctypes.c_char * 256),
)
DT_UNKNOWN = 0
DT_DIR = 4
DT_REG = 8
DT_LNK = 10
Dirent_p = ctypes.POINTER(Dirent)
Dirent_pp = ctypes.POINTER(Dirent_p)
libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
opendir = libc.opendir
opendir.argtypes = [ctypes.c_char_p]
opendir.restype = DIR_p
readdir_r = libc.readdir_r
readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp]
readdir_r.restype = ctypes.c_int
closedir = libc.closedir
closedir.argtypes = [DIR_p]
closedir.restype = ctypes.c_int
file_system_encoding = sys.getfilesystemencoding()
class PosixDirEntry(object):
__slots__ = ('name', '_d_type', '_stat', '_lstat', '_scandir_path', '_path', '_inode')
def __init__(self, scandir_path, name, d_type, inode):
self._scandir_path = scandir_path
self.name = name
self._d_type = d_type
self._inode = inode
self._stat = None
self._lstat = None
self._path = None
@property
def path(self):
if self._path is None:
self._path = join(self._scandir_path, self.name)
return self._path
def stat(self, follow_symlinks=True):
if follow_symlinks:
if self._stat is None:
if self.is_symlink():
self._stat = stat(self.path)
else:
if self._lstat is None:
self._lstat = lstat(self.path)
self._stat = self._lstat
return self._stat
else:
if self._lstat is None:
self._lstat = lstat(self.path)
return self._lstat
def is_dir(self, follow_symlinks=True):
if (self._d_type == DT_UNKNOWN or
(follow_symlinks and self.is_symlink())):
try:
st = self.stat(follow_symlinks=follow_symlinks)
except OSError as e:
if e.errno != ENOENT:
raise
return False
return st.st_mode & 0o170000 == S_IFDIR
else:
return self._d_type == DT_DIR
def is_file(self, follow_symlinks=True):
if (self._d_type == DT_UNKNOWN or
(follow_symlinks and self.is_symlink())):
try:
st = self.stat(follow_symlinks=follow_symlinks)
except OSError as e:
if e.errno != ENOENT:
raise
return False
return st.st_mode & 0o170000 == S_IFREG
else:
return self._d_type == DT_REG
def is_symlink(self):
if self._d_type == DT_UNKNOWN:
try:
st = self.stat(follow_symlinks=False)
except OSError as e:
if e.errno != ENOENT:
raise
return False
return st.st_mode & 0o170000 == S_IFLNK
else:
return self._d_type == DT_LNK
def inode(self):
return self._inode
def __str__(self):
return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
__repr__ = __str__
def posix_error(filename):
errno = ctypes.get_errno()
exc = OSError(errno, strerror(errno))
exc.filename = filename
return exc
def scandir_python(path=unicode('.')):
"""Like os.listdir(), but yield DirEntry objects instead of returning
a list of names.
"""
if isinstance(path, bytes):
opendir_path = path
is_bytes = True
else:
opendir_path = path.encode(file_system_encoding)
is_bytes = False
dir_p = opendir(opendir_path)
if not dir_p:
raise posix_error(path)
try:
result = Dirent_p()
while True:
entry = Dirent()
if readdir_r(dir_p, entry, result):
raise posix_error(path)
if not result:
break
name = entry.d_name
if name not in (b'.', b'..'):
if not is_bytes:
name = name.decode(file_system_encoding)
yield PosixDirEntry(path, name, entry.d_type, entry.d_ino)
finally:
if closedir(dir_p):
raise posix_error(path)
if _scandir is not None:
scandir_c = _scandir.scandir
DirEntry_c = _scandir.DirEntry
if _scandir is not None:
scandir = scandir_c
DirEntry = DirEntry_c
elif ctypes is not None and have_dirent_d_type:
scandir = scandir_python
DirEntry = PosixDirEntry
else:
scandir = scandir_generic
DirEntry = GenericDirEntry
# Some other system -- no d_type or stat information
else:
scandir = scandir_generic
DirEntry = GenericDirEntry
def _walk(top, topdown=True, onerror=None, followlinks=False):
"""Like Python 3.5's implementation of os.walk() -- faster than
the pre-Python 3.5 version as it uses scandir() internally.
"""
dirs = []
nondirs = []
# We may not have read permission for top, in which case we can't
# get a list of the files the directory contains. os.walk
# always suppressed the exception then, rather than blow up for a
# minor reason when (say) a thousand readable directories are still
# left to visit. That logic is copied here.
try:
scandir_it = scandir(top)
except OSError as error:
if onerror is not None:
onerror(error)
return
while True:
try:
try:
entry = next(scandir_it)
except StopIteration:
break
except OSError as error:
if onerror is not None:
onerror(error)
return
try:
is_dir = entry.is_dir()
except OSError:
# If is_dir() raises an OSError, consider that the entry is not
# a directory, same behaviour as os.path.isdir().
is_dir = False
if is_dir:
dirs.append(entry.name)
else:
nondirs.append(entry.name)
if not topdown and is_dir:
# Bottom-up: recurse into sub-directory, but exclude symlinks to
# directories if followlinks is False
if followlinks:
walk_into = True
else:
try:
is_symlink = entry.is_symlink()
except OSError:
# If is_symlink() raises an OSError, consider that the
# entry is not a symbolic link, same behaviour as
# os.path.islink().
is_symlink = False
walk_into = not is_symlink
if walk_into:
for entry in walk(entry.path, topdown, onerror, followlinks):
yield entry
# Yield before recursion if going top down
if topdown:
yield top, dirs, nondirs
# Recurse into sub-directories
for name in dirs:
new_path = join(top, name)
# Issue #23605: os.path.islink() is used instead of caching
# entry.is_symlink() result during the loop on os.scandir() because
# the caller can replace the directory entry during the "yield"
# above.
if followlinks or not islink(new_path):
for entry in walk(new_path, topdown, onerror, followlinks):
yield entry
else:
# Yield after recursion if going bottom up
yield top, dirs, nondirs
if IS_PY3 or sys.platform != 'win32':
walk = _walk
else:
# Fix for broken unicode handling on Windows on Python 2.x, see:
# https://github.com/benhoyt/scandir/issues/54
file_system_encoding = sys.getfilesystemencoding()
def walk(top, topdown=True, onerror=None, followlinks=False):
if isinstance(top, bytes):
top = top.decode(file_system_encoding)
return _walk(top, topdown, onerror, followlinks)
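
Since py3.5 the stdlib provides the same API, which is why this vendored copy can go; a minimal sketch of the equivalent calls, with illustrative paths:

import os

def count_files(top):
    total = 0
    for entry in os.scandir(top):            # DirEntry caches type/stat info
        if entry.is_file(follow_symlinks=False):
            total += 1
    return total

for root, dirs, files in os.walk('.'):       # scandir-backed since py3.5
    pass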

View file

@ -14,9 +14,5 @@
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import sys
if 2 == sys.version_info[0]:
from .py2 import *
else:
from .py3 import *
from .py3 import *

View file

@ -1,13 +1,8 @@
import re
import sys
import threading
if 2 == sys.version_info[0]:
# noinspection PyProtectedMember
from .futures.thread import _WorkItem
else:
# noinspection PyCompatibility,PyProtectedMember
from concurrent.futures.thread import _WorkItem
# noinspection PyProtectedMember,PyUnresolvedReferences
from concurrent.futures.thread import _WorkItem
class GenericWorkItem(_WorkItem):

View file

@ -1,23 +0,0 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Execute computations asynchronously using threads or processes."""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
from ._base import (FIRST_COMPLETED,
FIRST_EXCEPTION,
ALL_COMPLETED,
CancelledError,
TimeoutError,
Future,
Executor,
wait,
as_completed)
from .thread import ThreadPoolExecutor
try:
from .process import ProcessPoolExecutor
except ImportError:
# some platforms don't have multiprocessing
pass
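
On py3 the same names come straight from the stdlib, so an assumed one-to-one replacement for the removed backport is:

from concurrent.futures import (
    FIRST_COMPLETED, FIRST_EXCEPTION, ALL_COMPLETED,
    CancelledError, TimeoutError, Future, Executor,
    wait, as_completed, ThreadPoolExecutor, ProcessPoolExecutor,
)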

View file

@ -1,673 +0,0 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
import collections
import logging
import threading
import itertools
import time
import types
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
ALL_COMPLETED = 'ALL_COMPLETED'
_AS_COMPLETED = '_AS_COMPLETED'
# Possible future states (for internal use by the futures package).
PENDING = 'PENDING'
RUNNING = 'RUNNING'
# The future was cancelled by the user...
CANCELLED = 'CANCELLED'
# ...and _Waiter.add_cancelled() was called by a worker.
CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
FINISHED = 'FINISHED'
_FUTURE_STATES = [
PENDING,
RUNNING,
CANCELLED,
CANCELLED_AND_NOTIFIED,
FINISHED
]
_STATE_TO_DESCRIPTION_MAP = {
PENDING: "pending",
RUNNING: "running",
CANCELLED: "cancelled",
CANCELLED_AND_NOTIFIED: "cancelled",
FINISHED: "finished"
}
# Logger for internal use by the futures package.
LOGGER = logging.getLogger("concurrent.futures")
class Error(Exception):
"""Base class for all future-related exceptions."""
pass
class CancelledError(Error):
"""The Future was cancelled."""
pass
class TimeoutError(Error):
"""The operation exceeded the given deadline."""
pass
class _Waiter(object):
"""Provides the event that wait() and as_completed() block on."""
def __init__(self):
self.event = threading.Event()
self.finished_futures = []
def add_result(self, future):
self.finished_futures.append(future)
def add_exception(self, future):
self.finished_futures.append(future)
def add_cancelled(self, future):
self.finished_futures.append(future)
class _AsCompletedWaiter(_Waiter):
"""Used by as_completed()."""
def __init__(self):
super(_AsCompletedWaiter, self).__init__()
self.lock = threading.Lock()
def add_result(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_result(future)
self.event.set()
def add_exception(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_exception(future)
self.event.set()
def add_cancelled(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_cancelled(future)
self.event.set()
class _FirstCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_COMPLETED)."""
def add_result(self, future):
super(_FirstCompletedWaiter, self).add_result(future)
self.event.set()
def add_exception(self, future):
super(_FirstCompletedWaiter, self).add_exception(future)
self.event.set()
def add_cancelled(self, future):
super(_FirstCompletedWaiter, self).add_cancelled(future)
self.event.set()
class _AllCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
def __init__(self, num_pending_calls, stop_on_exception):
self.num_pending_calls = num_pending_calls
self.stop_on_exception = stop_on_exception
self.lock = threading.Lock()
super(_AllCompletedWaiter, self).__init__()
def _decrement_pending_calls(self):
with self.lock:
self.num_pending_calls -= 1
if not self.num_pending_calls:
self.event.set()
def add_result(self, future):
super(_AllCompletedWaiter, self).add_result(future)
self._decrement_pending_calls()
def add_exception(self, future):
super(_AllCompletedWaiter, self).add_exception(future)
if self.stop_on_exception:
self.event.set()
else:
self._decrement_pending_calls()
def add_cancelled(self, future):
super(_AllCompletedWaiter, self).add_cancelled(future)
self._decrement_pending_calls()
class _AcquireFutures(object):
"""A context manager that does an ordered acquire of Future conditions."""
def __init__(self, futures):
self.futures = sorted(futures, key=id)
def __enter__(self):
for future in self.futures:
future._condition.acquire()
def __exit__(self, *args):
for future in self.futures:
future._condition.release()
def _create_and_install_waiters(fs, return_when):
if return_when == _AS_COMPLETED:
waiter = _AsCompletedWaiter()
elif return_when == FIRST_COMPLETED:
waiter = _FirstCompletedWaiter()
else:
pending_count = sum(
f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
if return_when == FIRST_EXCEPTION:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
elif return_when == ALL_COMPLETED:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
else:
raise ValueError("Invalid return condition: %r" % return_when)
for f in fs:
f._waiters.append(waiter)
return waiter
def _yield_finished_futures(fs, waiter, ref_collect):
"""
Iterate on the list *fs*, yielding finished futures one by one in
reverse order.
Before yielding a future, *waiter* is removed from its waiters
and the future is removed from each set in the collection of sets
*ref_collect*.
The aim of this function is to avoid keeping stale references after
the future is yielded and before the iterator resumes.
"""
while fs:
f = fs[-1]
for futures_set in ref_collect:
futures_set.remove(f)
with f._condition:
f._waiters.remove(waiter)
del f
# Careful not to keep a reference to the popped value
yield fs.pop()
def as_completed(fs, timeout=None):
"""An iterator over the given futures that yields each as it completes.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
iterate over.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
An iterator that yields the given Futures as they complete (finished or
cancelled). If any given Futures are duplicated, they will be returned
once.
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
"""
if timeout is not None:
end_time = timeout + time.time()
fs = set(fs)
total_futures = len(fs)
with _AcquireFutures(fs):
finished = set(
f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
pending = fs - finished
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
finished = list(finished)
try:
for f in _yield_finished_futures(finished, waiter,
ref_collect=(fs,)):
f = [f]
yield f.pop()
while pending:
if timeout is None:
wait_timeout = None
else:
wait_timeout = end_time - time.time()
if wait_timeout < 0:
raise TimeoutError(
'%d (of %d) futures unfinished' % (
len(pending), total_futures))
waiter.event.wait(wait_timeout)
with waiter.lock:
finished = waiter.finished_futures
waiter.finished_futures = []
waiter.event.clear()
# reverse to keep finishing order
finished.reverse()
for f in _yield_finished_futures(finished, waiter,
ref_collect=(fs, pending)):
f = [f]
yield f.pop()
finally:
# Remove waiter from unfinished futures
for f in fs:
with f._condition:
f._waiters.remove(waiter)
DoneAndNotDoneFutures = collections.namedtuple(
'DoneAndNotDoneFutures', 'done not_done')
def wait(fs, timeout=None, return_when=ALL_COMPLETED):
"""Wait for the futures in the given sequence to complete.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
wait upon.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
return_when: Indicates when this function should return. The options
are:
FIRST_COMPLETED - Return when any future finishes or is
cancelled.
FIRST_EXCEPTION - Return when any future finishes by raising an
exception. If no future raises an exception
then it is equivalent to ALL_COMPLETED.
ALL_COMPLETED - Return when all futures finish or are cancelled.
Returns:
A named 2-tuple of sets. The first set, named 'done', contains the
futures that completed (is finished or cancelled) before the wait
completed. The second set, named 'not_done', contains uncompleted
futures.
"""
with _AcquireFutures(fs):
done = set(f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
not_done = set(fs) - done
if (return_when == FIRST_COMPLETED) and done:
return DoneAndNotDoneFutures(done, not_done)
elif (return_when == FIRST_EXCEPTION) and done:
if any(f for f in done
if not f.cancelled() and f.exception() is not None):
return DoneAndNotDoneFutures(done, not_done)
if len(done) == len(fs):
return DoneAndNotDoneFutures(done, not_done)
waiter = _create_and_install_waiters(fs, return_when)
waiter.event.wait(timeout)
for f in fs:
with f._condition:
f._waiters.remove(waiter)
done.update(waiter.finished_futures)
return DoneAndNotDoneFutures(done, set(fs) - done)
class Future(object):
"""Represents the result of an asynchronous computation."""
def __init__(self):
"""Initializes the future. Should not be called by clients."""
self._condition = threading.Condition()
self._state = PENDING
self._result = None
self._exception = None
self._traceback = None
self._waiters = []
self._done_callbacks = []
def _invoke_callbacks(self):
for callback in self._done_callbacks:
try:
callback(self)
except Exception:
LOGGER.exception('exception calling callback for %r', self)
except BaseException:
# Explicitly let all other new-style exceptions through so
# that we can catch all old-style exceptions with a simple
# "except:" clause below.
#
# All old-style exception objects are instances of
# types.InstanceType, but "except types.InstanceType:" does
# not catch old-style exceptions for some reason. Thus, the
# only way to catch all old-style exceptions without catching
# any new-style exceptions is to filter out the new-style
# exceptions, which all derive from BaseException.
raise
except:
# Because of the BaseException clause above, this handler only
# executes for old-style exception objects.
LOGGER.exception('exception calling callback for %r', self)
def __repr__(self):
with self._condition:
if self._state == FINISHED:
if self._exception:
return '<%s at %#x state=%s raised %s>' % (
self.__class__.__name__,
id(self),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._exception.__class__.__name__)
else:
return '<%s at %#x state=%s returned %s>' % (
self.__class__.__name__,
id(self),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._result.__class__.__name__)
return '<%s at %#x state=%s>' % (
self.__class__.__name__,
id(self),
_STATE_TO_DESCRIPTION_MAP[self._state])
def cancel(self):
"""Cancel the future if possible.
Returns True if the future was cancelled, False otherwise. A future
cannot be cancelled if it is running or has already completed.
"""
with self._condition:
if self._state in [RUNNING, FINISHED]:
return False
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
return True
self._state = CANCELLED
self._condition.notify_all()
self._invoke_callbacks()
return True
def cancelled(self):
"""Return True if the future was cancelled."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
def running(self):
"""Return True if the future is currently executing."""
with self._condition:
return self._state == RUNNING
def done(self):
"""Return True of the future was cancelled or finished executing."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
def __get_result(self):
if self._exception:
if isinstance(self._exception, types.InstanceType):
# The exception is an instance of an old-style class, which
# means type(self._exception) returns types.ClassType instead
# of the exception's actual class type.
exception_type = self._exception.__class__
else:
exception_type = type(self._exception)
raise exception_type, self._exception, self._traceback
else:
return self._result
def add_done_callback(self, fn):
"""Attaches a callable that will be called when the future finishes.
Args:
fn: A callable that will be called with this future as its only
argument when the future completes or is cancelled. The callable
will always be called by a thread in the same process in which
it was added. If the future has already completed or been
cancelled then the callable will be called immediately. These
callables are called in the order that they were added.
"""
with self._condition:
if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
self._done_callbacks.append(fn)
return
fn(self)
def result(self, timeout=None):
"""Return the result of the call that the future represents.
Args:
timeout: The number of seconds to wait for the result if the future
isn't done. If None, then there is no limit on the wait time.
Returns:
The result of the call that the future represents.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
Exception: If the call raised then that exception will be raised.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
else:
raise TimeoutError()
def exception_info(self, timeout=None):
"""Return a tuple of (exception, traceback) raised by the call that the
future represents.
Args:
timeout: The number of seconds to wait for the exception if the
future isn't done. If None, then there is no limit on the wait
time.
Returns:
The exception raised by the call that the future represents or None
if the call completed without raising.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception, self._traceback
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception, self._traceback
else:
raise TimeoutError()
def exception(self, timeout=None):
"""Return the exception raised by the call that the future represents.
Args:
timeout: The number of seconds to wait for the exception if the
future isn't done. If None, then there is no limit on the wait
time.
Returns:
The exception raised by the call that the future represents or None
if the call completed without raising.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
"""
return self.exception_info(timeout)[0]
# The following methods should only be used by Executors and in tests.
def set_running_or_notify_cancel(self):
"""Mark the future as running or process any cancel notifications.
Should only be used by Executor implementations and unit tests.
If the future has been cancelled (cancel() was called and returned
True) then any threads waiting on the future completing (though calls
to as_completed() or wait()) are notified and False is returned.
If the future was not cancelled then it is put in the running state
(future calls to running() will return True) and True is returned.
This method should be called by Executor implementations before
executing the work associated with this future. If this method returns
False then the work should not be executed.
Returns:
False if the Future was cancelled, True otherwise.
Raises:
RuntimeError: if this method was already called or if set_result()
or set_exception() was called.
"""
with self._condition:
if self._state == CANCELLED:
self._state = CANCELLED_AND_NOTIFIED
for waiter in self._waiters:
waiter.add_cancelled(self)
# self._condition.notify_all() is not necessary because
# self.cancel() triggers a notification.
return False
elif self._state == PENDING:
self._state = RUNNING
return True
else:
LOGGER.critical('Future %s in unexpected state: %s',
id(self),
self._state)
raise RuntimeError('Future in unexpected state')
def set_result(self, result):
"""Sets the return value of work associated with the future.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._result = result
self._state = FINISHED
for waiter in self._waiters:
waiter.add_result(self)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception_info(self, exception, traceback):
"""Sets the result of the future as being the given exception
and traceback.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._exception = exception
self._traceback = traceback
self._state = FINISHED
for waiter in self._waiters:
waiter.add_exception(self)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception(self, exception):
"""Sets the result of the future as being the given exception.
Should only be used by Executor implementations and unit tests.
"""
self.set_exception_info(exception, None)
class Executor(object):
"""This is an abstract base class for concrete asynchronous executors."""
def submit(self, fn, *args, **kwargs):
"""Submits a callable to be executed with the given arguments.
Schedules the callable to be executed as fn(*args, **kwargs) and returns
a Future instance representing the execution of the callable.
Returns:
A Future representing the given call.
"""
raise NotImplementedError()
def map(self, fn, *iterables, **kwargs):
"""Returns an iterator equivalent to map(fn, iter).
Args:
fn: A callable that will take as many arguments as there are
passed iterables.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
An iterator equivalent to: map(func, *iterables) but the calls may
be evaluated out-of-order.
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
Exception: If fn(*args) raises for any values.
"""
timeout = kwargs.get('timeout')
if timeout is not None:
end_time = timeout + time.time()
fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)]
# Yield must be hidden in closure so that the futures are submitted
# before the first iterator value is required.
def result_iterator():
try:
# reverse to keep finishing order
fs.reverse()
while fs:
# Careful not to keep a reference to the popped future
if timeout is None:
yield fs.pop().result()
else:
yield fs.pop().result(end_time - time.time())
finally:
for future in fs:
future.cancel()
return result_iterator()
def shutdown(self, wait=True):
"""Clean-up the resources associated with the Executor.
It is safe to call this method several times. No other
methods can be called after this one.
Args:
wait: If True then shutdown will not return until all running
futures have finished executing and the resources used by the
executor have been reclaimed.
"""
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.shutdown(wait=True)
return False
class BrokenExecutor(RuntimeError):
"""
Raised when an executor has become non-functional after a severe failure.
"""

View file

@ -1,363 +0,0 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Implements ProcessPoolExecutor.
The following diagram and text describe the data-flow through the system:
|======================= In-process =====================|== Out-of-process ==|
+----------+ +----------+ +--------+ +-----------+ +---------+
| | => | Work Ids | => | | => | Call Q | => | |
| | +----------+ | | +-----------+ | |
| | | ... | | | | ... | | |
| | | 6 | | | | 5, call() | | |
| | | 7 | | | | ... | | |
| Process | | ... | | Local | +-----------+ | Process |
| Pool | +----------+ | Worker | | #1..n |
| Executor | | Thread | | |
| | +----------- + | | +-----------+ | |
| | <=> | Work Items | <=> | | <= | Result Q | <= | |
| | +------------+ | | +-----------+ | |
| | | 6: call() | | | | ... | | |
| | | future | | | | 4, result | | |
| | | ... | | | | 3, except | | |
+----------+ +------------+ +--------+ +-----------+ +---------+
Executor.submit() called:
- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
- adds the id of the _WorkItem to the "Work Ids" queue
Local worker thread:
- reads work ids from the "Work Ids" queue and looks up the corresponding
WorkItem from the "Work Items" dict: if the work item has been cancelled then
it is simply removed from the dict, otherwise it is repackaged as a
_CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
- reads _ResultItems from "Result Q", updates the future stored in the
"Work Items" dict and deletes the dict entry
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
_ResultItems in "Request Q"
"""
import atexit
from . import _base
import Queue as queue
import multiprocessing
import threading
import weakref
import sys
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
# Workers are created as daemon threads and processes. This is done to allow the
# interpreter to exit when there are still idle processes in a
# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
# allowing workers to die with the interpreter has two undesirable properties:
# - The workers would still be running during interpreter shutdown,
# meaning that they would fail in unpredictable ways.
# - The workers could be killed while evaluating a work item, which could
# be bad if the callable being evaluated has external side-effects e.g.
# writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads/processes finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
def _python_exit():
global _shutdown
_shutdown = True
items = list(_threads_queues.items()) if _threads_queues else ()
for t, q in items:
q.put(None)
for t, q in items:
t.join(sys.maxint)
# Controls how many more calls than processes will be queued in the call queue.
# A smaller number will mean that processes spend more time idle waiting for
# work while a larger number will make Future.cancel() succeed less frequently
# (Futures in the call queue cannot be cancelled).
EXTRA_QUEUED_CALLS = 1
class _WorkItem(object):
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
class _ResultItem(object):
def __init__(self, work_id, exception=None, result=None):
self.work_id = work_id
self.exception = exception
self.result = result
class _CallItem(object):
def __init__(self, work_id, fn, args, kwargs):
self.work_id = work_id
self.fn = fn
self.args = args
self.kwargs = kwargs
def _process_worker(call_queue, result_queue):
"""Evaluates calls from call_queue and places the results in result_queue.
This worker is run in a separate process.
Args:
call_queue: A multiprocessing.Queue of _CallItems that will be read and
evaluated by the worker.
result_queue: A multiprocessing.Queue of _ResultItems that will be written
to by the worker.
shutdown: A multiprocessing.Event that will be set as a signal to the
worker that it should exit when call_queue is empty.
"""
while True:
call_item = call_queue.get(block=True)
if call_item is None:
# Wake up queue management thread
result_queue.put(None)
return
try:
r = call_item.fn(*call_item.args, **call_item.kwargs)
except:
e = sys.exc_info()[1]
result_queue.put(_ResultItem(call_item.work_id,
exception=e))
else:
result_queue.put(_ResultItem(call_item.work_id,
result=r))
def _add_call_item_to_queue(pending_work_items,
work_ids,
call_queue):
"""Fills call_queue with _WorkItems from pending_work_items.
This function never blocks.
Args:
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
are consumed and the corresponding _WorkItems from
pending_work_items are transformed into _CallItems and put in
call_queue.
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems.
"""
while True:
if call_queue.full():
return
try:
work_id = work_ids.get(block=False)
except queue.Empty:
return
else:
work_item = pending_work_items[work_id]
if work_item.future.set_running_or_notify_cancel():
call_queue.put(_CallItem(work_id,
work_item.fn,
work_item.args,
work_item.kwargs),
block=True)
else:
del pending_work_items[work_id]
continue
def _queue_management_worker(executor_reference,
processes,
pending_work_items,
work_ids_queue,
call_queue,
result_queue):
"""Manages the communication between this process and the worker processes.
This function is run in a local thread.
Args:
executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
this thread. Used to determine if the ProcessPoolExecutor has been
garbage collected and that this function can exit.
processes: A list of the multiprocessing.Process instances used as
workers.
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems for processing by the process workers.
result_queue: A multiprocessing.Queue of _ResultItems generated by the
process workers.
"""
nb_shutdown_processes = [0]
def shutdown_one_process():
"""Tell a worker to terminate, which will in turn wake us again"""
call_queue.put(None)
nb_shutdown_processes[0] += 1
while True:
_add_call_item_to_queue(pending_work_items,
work_ids_queue,
call_queue)
result_item = result_queue.get(block=True)
if result_item is not None:
work_item = pending_work_items[result_item.work_id]
del pending_work_items[result_item.work_id]
if result_item.exception:
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
# Delete references to object. See issue16284
del work_item
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
if _shutdown or executor is None or executor._shutdown_thread:
# Since no new work items can be added, it is safe to shutdown
# this thread if there are no pending work items.
if not pending_work_items:
while nb_shutdown_processes[0] < len(processes):
shutdown_one_process()
# If .join() is not called on the created processes then
# some multiprocessing.Queue methods may deadlock on Mac OS
# X.
for p in processes:
p.join()
call_queue.close()
return
del executor
_system_limits_checked = False
_system_limited = None
def _check_system_limits():
global _system_limits_checked, _system_limited
if _system_limits_checked:
if _system_limited:
raise NotImplementedError(_system_limited)
_system_limits_checked = True
try:
import os
nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
except (AttributeError, ValueError):
# sysconf not available or setting not available
return
if nsems_max == -1:
# indeterminate limit, assume that limit is determined
# by available memory only
return
if nsems_max >= 256:
# minimum number of semaphores available
# according to POSIX
return
_system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
raise NotImplementedError(_system_limited)
class ProcessPoolExecutor(_base.Executor):
def __init__(self, max_workers=None):
"""Initializes a new ProcessPoolExecutor instance.
Args:
max_workers: The maximum number of processes that can be used to
execute the given calls. If None or not given then as many
worker processes will be created as the machine has processors.
"""
_check_system_limits()
if max_workers is None:
self._max_workers = multiprocessing.cpu_count()
else:
if max_workers <= 0:
raise ValueError("max_workers must be greater than 0")
self._max_workers = max_workers
# Make the call queue slightly larger than the number of processes to
# prevent the worker processes from idling. But don't make it too big
# because futures in the call queue cannot be cancelled.
self._call_queue = multiprocessing.Queue(self._max_workers +
EXTRA_QUEUED_CALLS)
self._result_queue = multiprocessing.Queue()
self._work_ids = queue.Queue()
self._queue_management_thread = None
self._processes = set()
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
self._queue_count = 0
self._pending_work_items = {}
def _start_queue_management_thread(self):
# When the executor gets lost, the weakref callback will wake up
# the queue management thread.
def weakref_cb(_, q=self._result_queue):
q.put(None)
if self._queue_management_thread is None:
self._queue_management_thread = threading.Thread(
target=_queue_management_worker,
args=(weakref.ref(self, weakref_cb),
self._processes,
self._pending_work_items,
self._work_ids,
self._call_queue,
self._result_queue))
self._queue_management_thread.daemon = True
self._queue_management_thread.start()
_threads_queues[self._queue_management_thread] = self._result_queue
def _adjust_process_count(self):
for _ in range(len(self._processes), self._max_workers):
p = multiprocessing.Process(
target=_process_worker,
args=(self._call_queue,
self._result_queue))
p.start()
self._processes.add(p)
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._shutdown_thread:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, fn, args, kwargs)
self._pending_work_items[self._queue_count] = w
self._work_ids.put(self._queue_count)
self._queue_count += 1
# Wake up queue management thread
self._result_queue.put(None)
self._start_queue_management_thread()
self._adjust_process_count()
return f
submit.__doc__ = _base.Executor.submit.__doc__
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown_thread = True
if self._queue_management_thread:
# Wake up queue management thread
self._result_queue.put(None)
if wait:
self._queue_management_thread.join(sys.maxint)
# To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
self._queue_management_thread = None
self._call_queue = None
self._result_queue = None
self._processes = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
atexit.register(_python_exit)
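
For reference, a minimal sketch of how this backported ProcessPoolExecutor (removed in the py3 cleanup) was driven; the futures.process import path is an assumption for illustration:

from futures.process import ProcessPoolExecutor  # hypothetical import path

def square(x):
    return x * x

if __name__ == '__main__':  # required: workers are multiprocessing.Process instances
    pool = ProcessPoolExecutor(max_workers=2)
    futs = [pool.submit(square, n) for n in range(4)]
    print([f.result() for f in futs])  # [0, 1, 4, 9]
    pool.shutdown(wait=True)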

View file

@ -1,207 +0,0 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Implements ThreadPoolExecutor."""
import atexit
from six import PY2
if PY2:
from . import _base
else:
from concurrent.futures import _base
import itertools
import Queue as queue
import threading
import weakref
import sys
try:
from multiprocessing import cpu_count
except ImportError:
# some platforms don't have multiprocessing
def cpu_count():
return None
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
# Workers are created as daemon threads. This is done to allow the interpreter
# to exit when there are still idle threads in a ThreadPoolExecutor's thread
# pool (i.e. shutdown() was not called). However, allowing workers to die with
# the interpreter has two undesirable properties:
# - The workers would still be running during interpreter shutdown,
# meaning that they would fail in unpredictable ways.
# - The workers could be killed while evaluating a work item, which could
# be bad if the callable being evaluated has external side-effects e.g.
# writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
def _python_exit():
global _shutdown
_shutdown = True
items = list(_threads_queues.items()) if _threads_queues else ()
for t, q in items:
q.put(None)
for t, q in items:
t.join(sys.maxint)
atexit.register(_python_exit)
class _WorkItem(object):
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
def run(self):
if not self.future.set_running_or_notify_cancel():
return
try:
result = self.fn(*self.args, **self.kwargs)
except:
e, tb = sys.exc_info()[1:]
self.future.set_exception_info(e, tb)
else:
self.future.set_result(result)
def _worker(executor_reference, work_queue, initializer, initargs):
if initializer is not None:
try:
initializer(*initargs)
except BaseException:
_base.LOGGER.critical('Exception in initializer:', exc_info=True)
executor = executor_reference()
if executor is not None:
executor._initializer_failed()
return
try:
while True:
work_item = work_queue.get(block=True)
if work_item is not None:
work_item.run()
# Delete references to object. See issue16284
del work_item
# attempt to increment idle count
executor = executor_reference()
if executor is not None:
executor._idle_semaphore.release()
del executor
continue
executor = executor_reference()
# Exit if:
# - The interpreter is shutting down OR
# - The executor that owns the worker has been collected OR
# - The executor that owns the worker has been shutdown.
if _shutdown or executor is None or executor._shutdown:
# Notice other workers
work_queue.put(None)
return
del executor
except:
_base.LOGGER.critical('Exception in worker', exc_info=True)
class BrokenThreadPool(_base.BrokenExecutor):
"""
Raised when a worker thread in a ThreadPoolExecutor failed initializing.
"""
class ThreadPoolExecutor(_base.Executor):
# Used to assign unique thread names when thread_name_prefix is not supplied.
_counter = itertools.count().next
def __init__(self, max_workers=None, thread_name_prefix='', initializer=None, initargs=()):
"""Initializes a new ThreadPoolExecutor instance.
Args:
max_workers: The maximum number of threads that can be used to
execute the given calls.
thread_name_prefix: An optional name prefix to give our threads.
"""
if max_workers is None:
# Use this number because ThreadPoolExecutor is often
# used to overlap I/O instead of CPU work.
max_workers = (cpu_count() or 1) * 5
if max_workers <= 0:
raise ValueError("max_workers must be greater than 0")
self._max_workers = max_workers
self._initializer = initializer
self._initargs = initargs
self._work_queue = queue.Queue()
self._idle_semaphore = threading.Semaphore(0)
self._threads = set()
self._broken = False
self._shutdown = False
self._shutdown_lock = threading.Lock()
self._thread_name_prefix = (thread_name_prefix or
("ThreadPoolExecutor-%d" % self._counter()))
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._broken:
raise BrokenThreadPool(self._broken)
if self._shutdown:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, fn, args, kwargs)
self._work_queue.put(w)
self._adjust_thread_count()
return f
submit.__doc__ = _base.Executor.submit.__doc__
def _adjust_thread_count(self):
# if idle threads are available, don't spin new threads
if self._idle_semaphore.acquire(False):
return
# When the executor gets lost, the weakref callback will wake up
# the worker threads.
def weakref_cb(_, q=self._work_queue):
q.put(None)
num_threads = len(self._threads)
if num_threads < self._max_workers:
thread_name = '%s_%d' % (self._thread_name_prefix or self,
num_threads)
t = threading.Thread(name=thread_name, target=_worker,
args=(weakref.ref(self, weakref_cb),
self._work_queue, self._initializer, self._initargs))
t.daemon = True
t.start()
self._threads.add(t)
_threads_queues[t] = self._work_queue
def _initializer_failed(self):
with self._shutdown_lock:
self._broken = ('A thread initializer failed, the thread pool '
'is not usable anymore')
# Drain work queue and mark pending futures failed
while True:
try:
work_item = self._work_queue.get_nowait()
except queue.Empty:
break
if work_item is not None:
work_item.future.set_exception(BrokenThreadPool(self._broken))
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown = True
self._work_queue.put(None)
if wait:
for t in self._threads:
t.join(sys.maxint)
shutdown.__doc__ = _base.Executor.shutdown.__doc__
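
A minimal sketch of the initializer failure path above, assuming the module is importable as futures.thread:

from futures.thread import ThreadPoolExecutor, BrokenThreadPool

def bad_init():
    raise RuntimeError('no resources')

pool = ThreadPoolExecutor(max_workers=1, initializer=bad_init)
f = pool.submit(len, 'abc')  # worker init fails; pending future is failed
try:
    f.result()
except BrokenThreadPool as e:
    print('pool broken: %s' % e)
# once _broken is set, later submit() calls raise BrokenThreadPool immediately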

View file

@ -1,55 +0,0 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
# noinspection PyUnresolvedReferences
import sys
# noinspection PyProtectedMember
from .futures.thread import _base, BrokenThreadPool, ThreadPoolExecutor
from .base import *
class SgWorkItem(GenericWorkItem):
def run(self):
if self.future.set_running_or_notify_cancel():
try:
self._set_thread_name()
result = self.fn(*self.args, **self.kwargs)
except (BaseException, Exception):
e, tb = sys.exc_info()[1:]
self.future.set_exception_info(e, tb)
else:
self.future.set_result(result)
class SgThreadPoolExecutor(ThreadPoolExecutor):
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._broken:
raise BrokenThreadPool(self._broken)
if self._shutdown:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = SgWorkItem(f, fn, args, kwargs)
self._work_queue.put(w)
self._adjust_thread_count()
return f
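
Usage is unchanged from the base class, since the subclass only swaps in SgWorkItem for per-task thread naming; a minimal sketch:

executor = SgThreadPoolExecutor(thread_name_prefix='WEBSERVER', max_workers=4)
future = executor.submit(lambda: 21 * 2)
print(future.result())  # 42
executor.shutdown(wait=True)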

View file

@ -35,8 +35,8 @@ from send2trash import send2trash
from encodingKludge import SYS_ENCODING
import requests
from _23 import decode_bytes, filter_list, html_unescape, list_range, \
ordered_dict, Popen, scandir, urlparse, urlsplit, urlunparse
from _23 import decode_bytes, html_unescape, list_range, \
Popen, scandir, urlparse, urlsplit, urlunparse
from six import integer_types, iteritems, iterkeys, itervalues, moves, PY2, string_types, text_type
import zipfile
@ -159,7 +159,7 @@ class ConnectionFailDict(object):
if None is not db:
with self.lock:
my_db = db.DBConnection('cache.db')
if my_db.hasTable('connection_fails'):
if my_db.has_table('connection_fails'):
domains = my_db.select('SELECT DISTINCT domain_url from connection_fails')
for domain in domains:
self.domain_list[domain['domain_url']] = ConnectionFailList(domain['domain_url'])
@ -515,7 +515,7 @@ class ConnectionFailList(object):
def _load_fail_values(self):
if None is not DATA_DIR:
my_db = db.DBConnection('cache.db')
if my_db.hasTable('connection_fails_count'):
if my_db.has_table('connection_fails_count'):
r = my_db.select('SELECT * FROM connection_fails_count WHERE domain_url = ?', [self.url])
if r:
self._failure_count = try_int(r[0]['failure_count'], 0)
@ -536,7 +536,7 @@ class ConnectionFailList(object):
def _save_fail_value(self, field, value):
my_db = db.DBConnection('cache.db')
if my_db.hasTable('connection_fails_count'):
if my_db.has_table('connection_fails_count'):
r = my_db.action('UPDATE connection_fails_count SET %s = ? WHERE domain_url = ?' % field,
[value, self.url])
if 0 == r.rowcount:
@ -568,7 +568,7 @@ class ConnectionFailList(object):
with self.lock:
try:
my_db = db.DBConnection('cache.db')
if my_db.hasTable('connection_fails'):
if my_db.has_table('connection_fails'):
results = my_db.select('SELECT * FROM connection_fails WHERE domain_url = ?', [self.url])
self._fails = []
for r in results:
@ -586,7 +586,7 @@ class ConnectionFailList(object):
with self.lock:
try:
my_db = db.DBConnection('cache.db')
if my_db.hasTable('connection_fails'):
if my_db.has_table('connection_fails'):
# noinspection PyCallByClass,PyTypeChecker
time_limit = _totimestamp(datetime.datetime.now() - datetime.timedelta(days=28))
my_db.action('DELETE FROM connection_fails WHERE fail_time < ?', [time_limit])
@ -683,8 +683,9 @@ def get_system_temp_dir():
def proxy_setting(setting, request_url, force=False):
"""
Returns a list of a) proxy_setting address value or a PAC is fetched and parsed if proxy_setting
starts with "PAC:" (case-insensitive) and b) True/False if "PAC" is found in the proxy_setting.
Returns a list of
a) proxy_setting address value or a PAC is fetched and parsed if proxy_setting starts with "PAC:" (case-insensitive)
b) True/False if "PAC" is found in the proxy_setting.
The PAC data parser is crude, javascript is not eval'd. The first "PROXY URL" found is extracted with a list
of "url_a_part.url_remaining", "url_b_part.url_remaining", "url_n_part.url_remaining" and so on.
@ -720,7 +721,7 @@ def proxy_setting(setting, request_url, force=False):
request_url_match = False
parsed_url = urlparse(request_url)
netloc = parsed_url.netloc
for pac_data in re.finditer(r"""(?:[^'"]*['"])([^.]+\.[^'"]*)(?:['"])""", resp, re.I):
for pac_data in re.finditer(r"""[^'"]*['"]([^.]+\.[^'"]*)['"]""", resp, re.I):
data = re.search(r"""PROXY\s+([^'"]+)""", pac_data.group(1), re.I)
if data:
if force:
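
The regex rewrite only drops redundant non-capturing groups; a quick check with a made-up PAC snippet shows both forms capture the same text:

import re
resp = 'if (host == "intranet.local") return "PROXY 10.0.0.1:8080";'  # made-up PAC body
old = re.findall(r"""(?:[^'"]*['"])([^.]+\.[^'"]*)(?:['"])""", resp, re.I)
new = re.findall(r"""[^'"]*['"]([^.]+\.[^'"]*)['"]""", resp, re.I)
assert old == new  # identical captures; the (?:...) wrappers were no-ops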
@ -810,8 +811,8 @@ def get_url(url, # type: AnyStr
response_attr = ('text', 'content')[as_binary]
# selectively mute some errors
mute = filter_list(lambda x: kwargs.pop(x, False), [
'mute_connect_err', 'mute_read_timeout', 'mute_connect_timeout', 'mute_http_error'])
mute = list(filter(lambda x: kwargs.pop(x, False), [
'mute_connect_err', 'mute_read_timeout', 'mute_connect_timeout', 'mute_http_error']))
# reuse or instantiate request session
resp_sess = kwargs.pop('resp_sess', None)
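
One py3 subtlety in the mute change: filter() is lazy, so the kwargs.pop side effects only run once the result is materialized; wrapping in list() keeps the old eager filter_list behaviour. A toy example:

kwargs = {'mute_http_error': True, 'timeout': 30}
mute = list(filter(lambda x: kwargs.pop(x, False), ['mute_http_error']))
print(mute)    # ['mute_http_error']
print(kwargs)  # {'timeout': 30} - the popped key is gone immediately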
@ -1570,8 +1571,6 @@ def int_to_time(d_int):
"""
convert integer from dt_to_int back to datetime.time
:param d_int: integer
:return: datetime.time
"""
if None is d_int:
return None
@ -1610,19 +1609,19 @@ def ast_eval(value, default=None):
"""Convert string typed value into actual Python type and value
:param value: string value to convert
:param default: value to return if cannot convert
:param default: value to return if it cannot convert
:return: converted type and value or default
"""
if not isinstance(value, string_types):
return default
if 'OrderedDict()' == value:
value = ordered_dict()
value = dict()
elif 'OrderedDict([(' == value[0:14]:
try:
list_of_tuples = ast.literal_eval(value[12:-1])
value = ordered_dict()
value = dict()
for cur_tuple in list_of_tuples:
value[cur_tuple[0]] = cur_tuple[1]
except (BaseException, Exception):
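
Since py3.7, plain dicts preserve insertion order, so the OrderedDict repr can be rebuilt as dict; a worked example of the branch above:

import ast
value = "OrderedDict([('a', 1), ('b', 2)])"
list_of_tuples = ast.literal_eval(value[12:-1])  # [('a', 1), ('b', 2)]
print(dict(list_of_tuples))  # {'a': 1, 'b': 2} - order preserved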
@ -1667,8 +1666,8 @@ def calc_age(birthday, deathday=None, date=None):
# type: (datetime.date, datetime.date, Optional[datetime.date]) -> Optional[int]
"""
returns age based on current date or given date
:param birthday: birth date
:param deathday: death date
:param birthday: birthdate
:param deathday: deathdate
:param date:
"""
if isinstance(birthday, datetime.date):
@ -1677,7 +1676,7 @@ def calc_age(birthday, deathday=None, date=None):
try:
b_d = birthday.replace(year=today.year)
# raised when birth date is February 29
# raised when birthdate is February 29
# and the current year is not a leap year
except ValueError:
b_d = birthday.replace(year=today.year, month=birthday.month + 1, day=1)

View file

@ -47,7 +47,7 @@ class Itasa(ServiceBase):
quality_dict = {Quality.SDTV : '',
Quality.SDDVD : 'dvdrip',
Quality.RAWHDTV : '1080i',
Quality.HDTV : '720p',
Quality.FULLHDTV : ('1080p','720p'),
Quality.HDWEBDL : 'web-dl',
Quality.FULLHDWEBDL : 'web-dl',
@ -55,20 +55,20 @@ class Itasa(ServiceBase):
Quality.FULLHDBLURAY : ('bdrip', 'bluray'),
Quality.UNKNOWN : 'unknown' #Any subtitle will be downloaded
}
def init(self):
super(Itasa, self).init()
login_pattern = '<input type="hidden" name="return" value="([^\n\r\t ]+?)" /><input type="hidden" name="([^\n\r\t ]+?)" value="([^\n\r\t ]+?)" />'
response = requests.get(self.server_url + 'index.php')
if response.status_code != 200:
raise ServiceError('Initiate failed')
match = re.search(login_pattern, response.content, re.IGNORECASE | re.DOTALL)
if not match:
raise ServiceError('Can not find unique id parameter on page')
login_parameter = {'username': 'sickbeard',
'passwd': 'subliminal',
'remember': 'yes',
@ -77,7 +77,7 @@ class Itasa(ServiceBase):
'option': 'com_user',
'task': 'login',
'silent': 'true',
'return': match.group(1),
match.group(2): match.group(3)
}
@ -85,7 +85,7 @@ class Itasa(ServiceBase):
r = self.session.post(self.server_url + 'index.php', data=login_parameter)
if not re.search('logouticon.png', r.content, re.IGNORECASE | re.DOTALL):
raise ServiceError('Itasa Login Failed')
@cachedmethod
def get_series_id(self, name):
"""Get the show page and cache every show found in it"""
@ -100,7 +100,7 @@ class Itasa(ServiceBase):
series_id = int(match.group(1))
self.cache_for(self.get_series_id, args=(series_name,), result=series_id)
return self.cached_value(self.get_series_id, args=(name,))
def get_episode_id(self, series, series_id, season, episode, quality):
"""Get the id subtitle for episode with the given quality"""
@ -115,14 +115,14 @@ class Itasa(ServiceBase):
if seasons.text.lower().strip() == 'stagione %s' % str(season):
season_link = seasons['href']
break
if not season_link:
logger.debug(u'Could not find season %s for series %s' % (series, str(season)))
return None
r = self.session.get(season_link)
soup = BeautifulSoup(r.content, self.required_features)
all_qualities = soup.find('div', attrs = {'id' : 'remositorycontainerlist'})
for qualities in all_qualities.find_all(href=re.compile('func=select')):
if qualities.text.lower().strip() in self.quality_dict[quality]:
@ -131,11 +131,11 @@ class Itasa(ServiceBase):
soup = BeautifulSoup(r.content, self.required_features)
break
#If we want SDTV we are just on the right page so quality link will be None
if not quality == Quality.SDTV and not quality_link:
logger.debug(u'Could not find a subtitle with required quality for series %s season %s' % (series, str(season)))
return None
all_episodes = soup.find('div', attrs = {'id' : 'remositoryfilelisting'})
for episodes in all_episodes.find_all(href=re.compile('func=fileinfo')):
ep_string = "%(seasonnumber)dx%(episodenumber)02d" % {'seasonnumber': season, 'episodenumber': episode}
@ -144,12 +144,12 @@ class Itasa(ServiceBase):
if match:
episode_id = match.group(1)
return episode_id
def list_checked(self, video, languages):
return self.query(video.path or video.release, languages, get_keywords(video.guess), video.series, video.season, video.episode)
def query(self, filepath, languages, keywords, series, season, episode):
logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
@ -160,8 +160,8 @@ class Itasa(ServiceBase):
except KeyError:
logger.debug(u'Could not find series id for %s' % series)
return []
episode_id = self.get_episode_id(series, series_id, season, episode, Quality.nameQuality(filepath))
episode_id = self.get_episode_id(series, series_id, season, episode, Quality.name_quality(filepath))
if not episode_id:
logger.debug(u'Could not find subtitle for series %s' % series)
return []
@ -173,11 +173,11 @@ class Itasa(ServiceBase):
sub_language = self.get_language('it')
path = get_subtitle_path(filepath, sub_language, self.config.multi)
subtitle = ResultSubtitle(path, sub_language, self.__class__.__name__.lower(), sub_link)
return [subtitle]
def download(self, subtitle):
logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path))
try:
r = self.session.get(subtitle.link, headers={'Referer': self.server_url, 'User-Agent': self.user_agent})
@ -204,13 +204,13 @@ class Itasa(ServiceBase):
else:
zipsub.close()
raise DownloadFailedError('No subtitles found in zip file')
zipsub.close()
except Exception as e:
if os.path.exists(subtitle.path):
os.remove(subtitle.path)
raise DownloadFailedError(str(e))
logger.debug(u'Download finished')
Service = Itasa

View file

@ -8,7 +8,6 @@ import time
from exceptions_helper import ex
from six import integer_types, iteritems, iterkeys, string_types, text_type
from _23 import list_items, list_values
from lib.tvinfo_base.exceptions import *
from sg_helpers import calc_age, make_path
@ -53,7 +52,7 @@ tv_src_names = {
TVINFO_IMDB: 'imdb',
TVINFO_TRAKT: 'trakt',
TVINFO_TMDB: 'tmdb',
TVINFO_TVDB_SLUG : 'tvdb slug',
TVINFO_TVDB_SLUG: 'tvdb slug',
TVINFO_TRAKT_SLUG: 'trakt slug',
TVINFO_SLUG: 'generic slug',
@ -67,7 +66,7 @@ tv_src_names = {
log = logging.getLogger('TVInfo')
log.addHandler(logging.NullHandler())
TVInfoShowContainer = {} # type: Dict[ShowContainer]
TVInfoShowContainer = {} # type: Dict[str, ShowContainer]
class ShowContainer(dict):
@ -94,7 +93,7 @@ class ShowContainer(dict):
if acquired_lock:
try:
current_time = time.time()
for k, v in list_items(self):
for k, v in list(self.items()):
if self.max_age < current_time - v[1]:
lock_acquired = self[k].lock.acquire(False)
if lock_acquired:
@ -125,7 +124,7 @@ class TVInfoIDs(object):
trakt=None, # type: integer_types
rage=None, # type: integer_types
ids=None # type: Dict[int, integer_types]
): # type: (...) -> TVInfoIDs
):
ids = ids or {}
self.tvdb = tvdb or ids.get(TVINFO_TVDB)
self.tmdb = tmdb or ids.get(TVINFO_TMDB)
@ -156,7 +155,7 @@ class TVInfoIDs(object):
class TVInfoSocialIDs(object):
def __init__(self, twitter=None, instagram=None, facebook=None, wikipedia=None, ids=None):
# type: (str_int, str_int, str_int, str_int, Dict[int, str_int]) -> TVInfoSocialIDs
# type: (str_int, str_int, str_int, str_int, Dict[int, str_int]) -> None
ids = ids or {}
self.twitter = twitter or ids.get(TVINFO_TWITTER)
self.instagram = instagram or ids.get(TVINFO_INSTAGRAM)
@ -231,7 +230,7 @@ class TVInfoImage(object):
lang=None, height=None, width=None, aspect_ratio=None):
self.img_id = img_id # type: Optional[integer_types]
self.image_type = image_type # type: integer_types
self.sizes = sizes # type: Dict[TVInfoImageSize, AnyStr]
self.sizes = sizes # type: Dict[int, AnyStr]
self.type_str = type_str # type: AnyStr
self.main_image = main_image # type: bool
self.rating = rating # type: Optional[Union[float, integer_types]]
@ -243,7 +242,7 @@ class TVInfoImage(object):
def __str__(self):
return '<TVInfoImage %s [%s]>' % (TVInfoImageType.reverse_str.get(self.image_type, 'unknown'),
', '.join(TVInfoImageSize.reverse_str.get(s, 'unkown') for s in self.sizes))
', '.join(TVInfoImageSize.reverse_str.get(s, 'unknown') for s in self.sizes))
__repr__ = __str__
@ -409,7 +408,7 @@ class TVInfoShow(dict):
match, and so on.
"""
results = []
for cur_season in list_values(self):
for cur_season in self.values():
searchresult = cur_season.search(term=term, key=key)
if 0 != len(searchresult):
results.extend(searchresult)
@ -487,7 +486,7 @@ class TVInfoSeason(dict):
instances.
"""
results = []
for ep in list_values(self):
for ep in self.values():
searchresult = ep.search(term=term, key=key)
if None is not searchresult:
results.append(searchresult)
@ -679,7 +678,7 @@ class PersonBase(dict):
ids=None, # type: Dict
thumb_url=None, # type: AnyStr
**kwargs # type: Dict
): # type: (...) -> PersonBase
):
super(PersonBase, self).__init__(**kwargs)
self.id = p_id # type: Optional[integer_types]
self.name = name # type: Optional[AnyStr]
@ -769,7 +768,7 @@ class TVInfoPerson(PersonBase):
real_name=None, # type: AnyStr
akas=None, # type: Set[AnyStr]
**kwargs # type: Dict
): # type: (...) -> TVInfoPerson
):
super(TVInfoPerson, self).__init__(
p_id=p_id, name=name, image=image, thumb_url=thumb_url, bio=bio, gender=gender,
birthdate=birthdate, deathdate=deathdate, country=country, images=images,
@ -795,7 +794,7 @@ class TVInfoPerson(PersonBase):
class TVInfoCharacter(PersonBase):
def __init__(self, person=None, voice=None, plays_self=None, regular=None, show=None, start_year=None,
end_year=None, **kwargs):
# type: (List[TVInfoPerson], bool, bool, bool, TVInfoShow, int, int, Dict) -> TVInfoCharacter
# type: (List[TVInfoPerson], bool, bool, bool, TVInfoShow, int, int, Dict) -> None
super(TVInfoCharacter, self).__init__(**kwargs)
self.person = person # type: List[TVInfoPerson]
self.voice = voice # type: Optional[bool]

View file

@ -43,7 +43,8 @@ versions = [((3, 7, 1), (3, 8, 16)),
((3, 9, 0), (3, 9, 2)), ((3, 9, 4), (3, 9, 16)),
((3, 10, 0), (3, 11, 2))] # inclusive version ranges
if not any(list(map(lambda v: v[0] <= sys.version_info[:3] <= v[1], versions))) and not int(os.environ.get('PYT', 0)):
print('Python %s.%s.%s detected.' % sys.version_info[:3])
major, minor, micro = sys.version_info[:3]
print('Python %s.%s.%s detected.' % (major, minor, micro))
print('Sorry, SickGear requires a Python version %s' % ', '.join(map(
lambda r: '%s - %s' % tuple(map(lambda v: str(v).replace(',', '.')[1:-1], r)), versions)))
sys.exit(1)
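
The inclusive range test relies on Python's tuple comparison; a condensed sketch:

import sys
versions = [((3, 7, 1), (3, 8, 16)), ((3, 10, 0), (3, 11, 2))]  # subset for brevity
supported = any(low <= sys.version_info[:3] <= high for low, high in versions)
print(supported)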
@ -225,7 +226,7 @@ class SickGear(object):
if o in ('-h', '--help'):
sys.exit(self.help_message())
# For now we'll just silence the logging
# For now, we'll just silence the logging
if o in ('-q', '--quiet'):
self.console_logging = False
@ -427,7 +428,7 @@ class SickGear(object):
('sickbeard.db', sickgear.mainDB.MIN_DB_VERSION, sickgear.mainDB.MAX_DB_VERSION,
sickgear.mainDB.TEST_BASE_VERSION, 'MainDb')
]:
cur_db_version = db.DBConnection(d).checkDBVersion()
cur_db_version = db.DBConnection(d).check_db_version()
# handling of standalone TEST db versions
load_msg = 'Downgrading %s to production version' % d
@ -436,7 +437,7 @@ class SickGear(object):
print('Your [%s] database version (%s) is a test db version and doesn\'t match SickGear required '
'version (%s), downgrading to production db' % (d, cur_db_version, max_v))
self.execute_rollback(mo, max_v, load_msg)
cur_db_version = db.DBConnection(d).checkDBVersion()
cur_db_version = db.DBConnection(d).check_db_version()
if 100000 <= cur_db_version:
print(u'Rollback to production failed.')
sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
@ -445,13 +446,13 @@ class SickGear(object):
print(u'Rollback to production of [%s] successful.' % d)
sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Finished')
# handling of production version higher then current base of test db
# handling of production version higher than current base of test db
if isinstance(base_v, integer_types) and max_v >= 100000 > cur_db_version > base_v:
sickgear.classes.loading_msg.set_msg_progress(load_msg, 'Rollback')
print('Your [%s] database version (%s) is a db version and doesn\'t match SickGear required '
'version (%s), downgrading to production base db' % (d, cur_db_version, max_v))
self.execute_rollback(mo, base_v, load_msg)
cur_db_version = db.DBConnection(d).checkDBVersion()
cur_db_version = db.DBConnection(d).check_db_version()
if 100000 <= cur_db_version:
print(u'Rollback to production base failed.')
sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
@ -473,7 +474,7 @@ class SickGear(object):
u' what this version of SickGear supports. Trying to rollback now. Please wait...' %
(d, cur_db_version))
self.execute_rollback(mo, max_v, load_msg)
if db.DBConnection(d).checkDBVersion() > max_v:
if db.DBConnection(d).check_db_version() > max_v:
print(u'Rollback failed.')
sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
print(u'Rollback of [%s] successful.' % d)
@ -553,7 +554,7 @@ class SickGear(object):
# Build internal name cache
sickgear.classes.loading_msg.message = 'Build name cache'
name_cache.buildNameCache()
name_cache.build_name_cache()
# load all ids from xem
sickgear.classes.loading_msg.message = 'Loading xem data'
@ -816,7 +817,7 @@ class SickGear(object):
@staticmethod
def exit(code):
# noinspection PyProtectedMember
# noinspection PyProtectedMember,PyUnresolvedReferences
os._exit(code)

View file

@ -36,7 +36,7 @@ import zlib
from . import classes, db, helpers, image_cache, indexermapper, logger, metadata, naming, people_queue, providers, \
scene_exceptions, scene_numbering, scheduler, search_backlog, search_propers, search_queue, search_recent, \
show_queue, show_updater, subtitles, trakt_helpers, traktChecker, version_checker, watchedstate_queue
show_queue, show_updater, subtitles, trakt_helpers, version_checker, watchedstate_queue
from . import auto_post_processer, properFinder # must come after the above imports
from .common import SD, SKIPPED, USER_AGENT
from .config import check_section, check_setting_int, check_setting_str, ConfigMigrator, minimax
@ -55,8 +55,8 @@ from browser_ua import get_ua
from configobj import ConfigObj
from api_trakt import TraktAPI
from _23 import b64encodestring, decode_bytes, filter_iter, list_items, map_list, ordered_dict, scandir
from six import iteritems, PY2, string_types
from _23 import b64encodestring, decode_bytes, scandir
from six import iteritems, string_types
import sg_helpers
# noinspection PyUnreachableCode
@ -119,9 +119,9 @@ REMOVE_FILENAME_CHARS = None
IMPORT_DEFAULT_CHECKED_SHOWS = 0
# /non ui settings
providerList = []
newznabProviderList = []
torrentRssProviderList = []
provider_list = []
newznab_providers = []
torrent_rss_providers = []
metadata_provider_dict = {}
MODULE_UPDATE_STRING = None
@ -655,7 +655,7 @@ def initialize(console_logging=True):
def init_stage_1(console_logging):
# Misc
global showList, showDict, switched_shows, providerList, newznabProviderList, torrentRssProviderList, \
global showList, showDict, switched_shows, provider_list, newznab_providers, torrent_rss_providers, \
WEB_HOST, WEB_ROOT, ACTUAL_CACHE_DIR, CACHE_DIR, ZONEINFO_DIR, ADD_SHOWS_WO_DIR, ADD_SHOWS_METALANG, \
CREATE_MISSING_SHOW_DIRS, SHOW_DIRS_WITH_DOTS, \
RECENTSEARCH_STARTUP, NAMING_FORCE_FOLDERS, SOCKET_TIMEOUT, DEBUG, TVINFO_DEFAULT, \
@ -666,7 +666,7 @@ def init_stage_1(console_logging):
# Add Show Defaults
global QUALITY_DEFAULT, WANTED_BEGIN_DEFAULT, WANTED_LATEST_DEFAULT, SHOW_TAG_DEFAULT, PAUSE_DEFAULT, \
STATUS_DEFAULT, SCENE_DEFAULT, SUBTITLES_DEFAULT, FLATTEN_FOLDERS_DEFAULT, ANIME_DEFAULT
# Post processing
# Post-processing
global KEEP_PROCESSED_DIR, PROCESS_LAST_DIR, PROCESS_LAST_METHOD, PROCESS_LAST_CLEANUP
# Views
global GUI_NAME, HOME_LAYOUT, FOOTER_TIME_LAYOUT, POSTER_SORTBY, POSTER_SORTDIR, DISPLAY_SHOW_SPECIALS, \
@ -1353,10 +1353,10 @@ def init_stage_1(console_logging):
EPISODE_VIEW_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'episode_view_missed_range', 7)
HISTORY_LAYOUT = check_setting_str(CFG, 'GUI', 'history_layout', 'detailed')
BROWSELIST_HIDDEN = map_list(
BROWSELIST_HIDDEN = list(map(
lambda y: TVidProdid.glue in y and y or '%s%s%s' % (
(TVINFO_TVDB, TVINFO_IMDB)[bool(helpers.parse_imdb_id(y))], TVidProdid.glue, y),
[x.strip() for x in check_setting_str(CFG, 'GUI', 'browselist_hidden', '').split('|~|') if x.strip()])
[x.strip() for x in check_setting_str(CFG, 'GUI', 'browselist_hidden', '').split('|~|') if x.strip()]))
BROWSELIST_MRU = sg_helpers.ast_eval(check_setting_str(CFG, 'GUI', 'browselist_prefs', None), {})
BACKUP_DB_PATH = check_setting_str(CFG, 'Backup', 'backup_db_path', '')
@ -1370,16 +1370,16 @@ def init_stage_1(console_logging):
sg_helpers.DOMAIN_FAILURES.load_from_db()
# initialize NZB and TORRENT providers
providerList = providers.makeProviderList()
provider_list = providers.provider_modules()
NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
newznab_providers = providers.newznab_source_list(NEWZNAB_DATA)
torrentrss_data = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '')
torrentRssProviderList = providers.getTorrentRssProviderList(torrentrss_data)
torrent_rss_providers = providers.torrent_rss_source_list(torrentrss_data)
# dynamically load provider settings
for torrent_prov in [curProvider for curProvider in providers.sortedProviderList()
for torrent_prov in [curProvider for curProvider in providers.sorted_sources()
if GenericProvider.TORRENT == curProvider.providerType]:
prov_id = torrent_prov.get_id()
prov_id_uc = torrent_prov.get_id().upper()
@ -1424,7 +1424,7 @@ def init_stage_1(console_logging):
elif isinstance(default, int):
setattr(torrent_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
for nzb_prov in [curProvider for curProvider in providers.sortedProviderList()
for nzb_prov in [curProvider for curProvider in providers.sorted_sources()
if GenericProvider.NZB == curProvider.providerType]:
prov_id = nzb_prov.get_id()
prov_id_uc = nzb_prov.get_id().upper()
@ -1450,10 +1450,10 @@ def init_stage_1(console_logging):
setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default))
elif isinstance(default, int):
setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
for cur_provider in filter_iter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in (
for cur_provider in filter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in (
1449593765, 1597250020, 1524942228, 160758496, 2925374331
) or (p.url and abs(zlib.crc32(decode_bytes(re.sub(r'[./]', '', p.url[-10:])))) + 40000400 in (
2417143804,)), providers.sortedProviderList()):
2417143804,)), providers.sorted_sources()):
header = {'User-Agent': get_ua()}
if hasattr(cur_provider, 'nn'):
cur_provider.nn = False
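
The filter keys providers by a crc32 checksum of the name (offset by 40000400) so literal provider names stay out of the source; a sketch with a made-up name:

import zlib
name = 'Example Provider'  # hypothetical; real names are not in the source
token = abs(zlib.crc32(name.encode('utf-8'))) + 40000400
print(token in (1449593765, 1597250020, 1524942228, 160758496, 2925374331))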
@ -1505,24 +1505,6 @@ def init_stage_1(console_logging):
pass
logger.sb_log_instance.init_logging(console_logging=console_logging)
if PY2:
try:
import _scandir
except ImportError:
_scandir = None
try:
import ctypes
except ImportError:
ctypes = None
if None is not _scandir and None is not ctypes and not getattr(_scandir, 'DirEntry', None):
MODULE_UPDATE_STRING = \
'Your scandir binary module is outdated, using the slow but newer Python module.' \
'<br>Upgrade the binary at a command prompt with' \
' # <span class="boldest">python -m pip install -U scandir</span>' \
'<br>Important: You <span class="boldest">must</span> Shutdown SickGear before upgrading'
showList = []
showDict = {}
@ -1557,19 +1539,19 @@ def init_stage_2():
# initialize main database
my_db = db.DBConnection()
db.MigrationCode(my_db)
db.migration_code(my_db)
# initialize the cache database
my_db = db.DBConnection('cache.db')
db.upgradeDatabase(my_db, cache_db.InitialSchema)
db.upgrade_database(my_db, cache_db.InitialSchema)
# initialize the failed downloads database
my_db = db.DBConnection('failed.db')
db.upgradeDatabase(my_db, failed_db.InitialSchema)
db.upgrade_database(my_db, failed_db.InitialSchema)
# fix up any db problems
my_db = db.DBConnection()
db.sanityCheckDatabase(my_db, mainDB.MainSanityCheck)
db.sanity_check_db(my_db, mainDB.MainSanityCheck)
# initialize metadata_providers
metadata_provider_dict = metadata.get_metadata_generator_dict()
@ -1592,40 +1574,40 @@ def init_stage_2():
update_now = datetime.timedelta(minutes=0)
update_software_scheduler = scheduler.Scheduler(
version_checker.SoftwareUpdater(),
cycleTime=datetime.timedelta(hours=UPDATE_INTERVAL),
threadName='SOFTWAREUPDATER',
cycle_time=datetime.timedelta(hours=UPDATE_INTERVAL),
thread_name='SOFTWAREUPDATER',
silent=False)
update_packages_scheduler = scheduler.Scheduler(
version_checker.PackagesUpdater(),
cycleTime=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL),
cycle_time=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL),
# run_delay=datetime.timedelta(minutes=2),
threadName='PACKAGESUPDATER',
thread_name='PACKAGESUPDATER',
silent=False)
show_queue_scheduler = scheduler.Scheduler(
show_queue.ShowQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName='SHOWQUEUE')
cycle_time=datetime.timedelta(seconds=3),
thread_name='SHOWQUEUE')
show_update_scheduler = scheduler.Scheduler(
show_updater.ShowUpdater(),
cycleTime=datetime.timedelta(hours=1),
cycle_time=datetime.timedelta(hours=1),
start_time=datetime.time(hour=SHOW_UPDATE_HOUR),
threadName='SHOWUPDATER',
thread_name='SHOWUPDATER',
prevent_cycle_run=show_queue_scheduler.action.is_show_update_running) # 3AM
people_queue_scheduler = scheduler.Scheduler(
people_queue.PeopleQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName='PEOPLEQUEUE'
cycle_time=datetime.timedelta(seconds=3),
thread_name='PEOPLEQUEUE'
)
# searchers
search_queue_scheduler = scheduler.Scheduler(
search_queue.SearchQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName='SEARCHQUEUE')
cycle_time=datetime.timedelta(seconds=3),
thread_name='SEARCHQUEUE')
init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0))
@ -1633,13 +1615,13 @@ def init_stage_2():
update_interval = datetime.timedelta(minutes=(RECENTSEARCH_INTERVAL, 1)[4499 == RECENTSEARCH_INTERVAL])
recent_search_scheduler = scheduler.Scheduler(
search_recent.RecentSearcher(),
cycleTime=update_interval,
cycle_time=update_interval,
run_delay=update_now if RECENTSEARCH_STARTUP else datetime.timedelta(minutes=init_search_delay or 5),
threadName='RECENTSEARCHER',
thread_name='RECENTSEARCHER',
prevent_cycle_run=search_queue_scheduler.action.is_recentsearch_in_progress)
if [x for x in providers.sortedProviderList() if x.is_active() and
getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]:
if [x for x in providers.sorted_sources()
if x.is_active() and getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]:
nextbacklogpossible = datetime.datetime.fromtimestamp(
search_backlog.BacklogSearcher().last_runtime) + datetime.timedelta(hours=23)
now = datetime.datetime.now()
@ -1655,9 +1637,9 @@ def init_stage_2():
backlogdelay = 10
backlog_search_scheduler = search_backlog.BacklogSearchScheduler(
search_backlog.BacklogSearcher(),
cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
cycle_time=datetime.timedelta(minutes=get_backlog_cycle_time()),
run_delay=datetime.timedelta(minutes=init_search_delay or backlogdelay),
threadName='BACKLOG',
thread_name='BACKLOG',
prevent_cycle_run=search_queue_scheduler.action.is_standard_backlog_in_progress)
propers_searcher = search_propers.ProperSearcher()
@ -1670,26 +1652,22 @@ def init_stage_2():
proper_finder_scheduler = scheduler.Scheduler(
propers_searcher,
cycleTime=datetime.timedelta(days=1),
cycle_time=datetime.timedelta(days=1),
run_delay=datetime.timedelta(minutes=init_search_delay or properdelay),
threadName='FINDPROPERS',
thread_name='FINDPROPERS',
prevent_cycle_run=search_queue_scheduler.action.is_propersearch_in_progress)
# processors
media_process_scheduler = scheduler.Scheduler(
auto_post_processer.PostProcesser(),
cycleTime=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL),
threadName='POSTPROCESSER',
cycle_time=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL),
thread_name='POSTPROCESSER',
silent=not PROCESS_AUTOMATICALLY)
"""
trakt_checker_scheduler = scheduler.Scheduler(
traktChecker.TraktChecker(), cycleTime=datetime.timedelta(hours=1),
threadName='TRAKTCHECKER', silent=not USE_TRAKT)
"""
subtitles_finder_scheduler = scheduler.Scheduler(
subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL),
threadName='FINDSUBTITLES',
cycle_time=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL),
thread_name='FINDSUBTITLES',
silent=not USE_SUBTITLES)
background_mapping_task = threading.Thread(name='MAPPINGSUPDATER', target=indexermapper.load_mapped_ids,
@ -1697,20 +1675,20 @@ def init_stage_2():
watched_state_queue_scheduler = scheduler.Scheduler(
watchedstate_queue.WatchedStateQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName='WATCHEDSTATEQUEUE')
cycle_time=datetime.timedelta(seconds=3),
thread_name='WATCHEDSTATEQUEUE')
emby_watched_state_scheduler = scheduler.Scheduler(
EmbyWatchedStateUpdater(),
cycleTime=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL),
cycle_time=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL),
run_delay=datetime.timedelta(minutes=5),
threadName='EMBYWATCHEDSTATE')
thread_name='EMBYWATCHEDSTATE')
plex_watched_state_scheduler = scheduler.Scheduler(
PlexWatchedStateUpdater(),
cycleTime=datetime.timedelta(minutes=PLEX_WATCHEDSTATE_INTERVAL),
cycle_time=datetime.timedelta(minutes=PLEX_WATCHEDSTATE_INTERVAL),
run_delay=datetime.timedelta(minutes=5),
threadName='PLEXWATCHEDSTATE')
thread_name='PLEXWATCHEDSTATE')
MEMCACHE['history_tab_limit'] = 11
MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit'])
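
All scheduler constructions above now take the snake_case keywords; the new call shape, with a hypothetical action object:

example = scheduler.Scheduler(
    MyAction(),  # hypothetical queue/action object
    cycle_time=datetime.timedelta(minutes=10),
    run_delay=datetime.timedelta(minutes=5),
    thread_name='MYACTION',
    silent=False)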
@ -1750,7 +1728,7 @@ def start():
and True is not TVInfoAPI(i).config.get('people_only')]
background_mapping_task.start()
for p in providers.sortedProviderList():
for p in providers.sorted_sources():
if p.is_active() and getattr(p, 'ping_iv', None):
# noinspection PyProtectedMember
provider_ping_thread_pool[p.get_id()] = threading.Thread(
@ -1863,9 +1841,9 @@ def save_config():
new_config = ConfigObj()
new_config.filename = CONFIG_FILE
# For passwords you must include the word `password` in the item_name and
# For passwords, you must include the word `password` in the item_name and
# add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
new_config['General'] = ordered_dict()
new_config['General'] = dict()
s_z = check_setting_int(CFG, 'General', 'stack_size', 0)
if s_z:
new_config['General']['stack_size'] = s_z
@ -1927,8 +1905,9 @@ def save_config():
new_config['General']['flatten_folders_default'] = int(FLATTEN_FOLDERS_DEFAULT)
new_config['General']['anime_default'] = int(ANIME_DEFAULT)
new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER)
new_config['General']['provider_homes'] = '%s' % dict([(pid, v) for pid, v in list_items(PROVIDER_HOMES) if pid in [
p.get_id() for p in [x for x in providers.sortedProviderList() if GenericProvider.TORRENT == x.providerType]]])
new_config['General']['provider_homes'] = '%s' % dict([(pid, v) for pid, v in list(PROVIDER_HOMES.items())
if pid in [
p.get_id() for p in [x for x in providers.sorted_sources() if GenericProvider.TORRENT == x.providerType]]])
new_config['General']['update_notify'] = int(UPDATE_NOTIFY)
new_config['General']['update_auto'] = int(UPDATE_AUTO)
new_config['General']['update_interval'] = int(UPDATE_INTERVAL)
@ -2014,7 +1993,7 @@ def save_config():
new_config['Backup']['backup_db_max_count'] = BACKUP_DB_MAX_COUNT
default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'use_after_get_data')
for src in filter_iter(lambda px: GenericProvider.TORRENT == px.providerType, providers.sortedProviderList()):
for src in filter(lambda px: GenericProvider.TORRENT == px.providerType, providers.sorted_sources()):
src_id = src.get_id()
src_id_uc = src_id.upper()
new_config[src_id_uc] = {}
@ -2052,19 +2031,19 @@ def save_config():
del new_config[src_id_uc]
default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog')
for src in filter_iter(lambda px: GenericProvider.NZB == px.providerType, providers.sortedProviderList()):
for src in filter(lambda px: GenericProvider.NZB == px.providerType, providers.sorted_sources()):
src_id = src.get_id()
src_id_uc = src.get_id().upper()
new_config[src_id_uc] = {}
if int(src.enabled):
new_config[src_id_uc][src_id] = int(src.enabled)
for attr in filter_iter(lambda _a: None is not getattr(src, _a, None),
('api_key', 'digest', 'username', 'search_mode')):
for attr in filter(lambda _a: None is not getattr(src, _a, None),
('api_key', 'digest', 'username', 'search_mode')):
if 'search_mode' != attr or 'eponly' != getattr(src, attr):
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr)
for attr in filter_iter(lambda _a: None is not getattr(src, _a, None), (
for attr in filter(lambda _a: None is not getattr(src, _a, None), (
'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active',
@ -2280,7 +2259,7 @@ def save_config():
cfg_lc = cfg.lower()
cfg_keys += [cfg]
new_config[cfg] = {}
for (k, v) in filter_iter(lambda arg: any([arg[1]]) or (
for (k, v) in filter(lambda arg: any([arg[1]]) or (
# allow saving where item value default is non-zero but 0 is a required setting value
cfg_lc in ('kodi', 'xbmc', 'synoindex', 'nzbget', 'torrent', 'telegram')
and arg[0] in ('always_on', 'priority', 'send_image'))
@ -2320,13 +2299,13 @@ def save_config():
new_config[notifier]['%s_notify_onsubtitledownload' % notifier.lower()] = int(onsubtitledownload)
# remove empty stanzas
for k in filter_iter(lambda c: not new_config[c], cfg_keys):
for k in filter(lambda c: not new_config[c], cfg_keys):
del new_config[k]
new_config['Newznab'] = {}
new_config['Newznab']['newznab_data'] = NEWZNAB_DATA
torrent_rss = '!!!'.join([x.config_str() for x in torrentRssProviderList])
torrent_rss = '!!!'.join([x.config_str() for x in torrent_rss_providers])
if torrent_rss:
new_config['TorrentRss'] = {}
new_config['TorrentRss']['torrentrss_data'] = torrent_rss
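
The '!!!' delimiter gives a simple round trip for the provider payloads; a sketch:

torrent_rss = '!!!'.join(x.config_str() for x in torrent_rss_providers)
restored = torrent_rss.split('!!!') if torrent_rss else []  # one config_str per source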

View file

@ -1,828 +0,0 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
#
# This file contains deprecated routes and parameters
# Eventually, this file and its use will be removed from SG core.
#
import threading
import traceback
import sickgear
from . import logger
from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TVDB
from .tv import TVidProdid
from requests.compat import urljoin
from tornado import gen
from tornado.escape import utf8
from tornado.web import RequestHandler
from _23 import decode_str, filter_iter
from six import iteritems
from sg_futures import SgThreadPoolExecutor
try:
from multiprocessing import cpu_count
except ImportError:
# some platforms don't have multiprocessing
def cpu_count():
return None
""" deprecated_item, remove in 2020 = 8 items """
""" prevent issues with requests using legacy params = 3 items"""
# TODO: deprecated items, find the above comments and remove in 2020
class LegacyBase(RequestHandler):
# todo: move to RouteHandler after removing _legacy module
executor = SgThreadPoolExecutor(thread_name_prefix='WEBSERVER', max_workers=min(32, (cpu_count() or 1) + 4))
# todo: move to RouteHandler after removing _legacy module
def redirect(self, url, permanent=False, status=None):
"""Send a redirect to the given (optionally relative) URL.
----->>>>> NOTE: Removed self.finish <<<<<-----
If the ``status`` argument is specified, that value is used as the
HTTP status code; otherwise either 301 (permanent) or 302
(temporary) is chosen based on the ``permanent`` argument.
The default is 302 (temporary).
"""
if not url.startswith(sickgear.WEB_ROOT):
url = sickgear.WEB_ROOT + url
# noinspection PyUnresolvedReferences
if self._headers_written:
raise Exception('Cannot redirect after headers have been written')
if status is None:
status = 301 if permanent else 302
else:
assert isinstance(status, int)
assert 300 <= status <= 399
self.set_status(status)
self.set_header('Location', urljoin(utf8(self.request.uri),
utf8(url)))
# todo: move to RouteHandler after removing _legacy module
def write_error(self, status_code, **kwargs):
body = ''
try:
if self.request.body:
body = '\nRequest body: %s' % decode_str(self.request.body)
except (BaseException, Exception):
pass
logger.log('Sent %s error response to a `%s` request for `%s` with headers:\n%s%s' %
(status_code, self.request.method, self.request.path, self.request.headers, body), logger.WARNING)
# suppress traceback by removing 'exc_info' kwarg
if 'exc_info' in kwargs:
logger.log('Gracefully handled exception text:\n%s' % traceback.format_exception(*kwargs["exc_info"]),
logger.DEBUG)
del kwargs['exc_info']
return super(LegacyBase, self).write_error(status_code, **kwargs)
def data_received(self, *args):
pass
class LegacyBaseHandler(LegacyBase):
def redirect_args(self, new_url, exclude=(None,), **kwargs):
args = '&'.join(['%s=%s' % (k, v) for (k, v) in
filter_iter(lambda arg: arg[1] not in exclude, iteritems(kwargs))])
self.redirect('%s%s' % (new_url, ('', '?' + args)[bool(args)]), permanent=True)
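
The ('', '?' + args)[bool(args)] expression indexes a 2-tuple with a bool to optionally prepend the query string; for example:

args = 'show=123'
print(('', '?' + args)[bool(args)])  # '?show=123'
args = ''
print(('', '?' + args)[bool(args)])  # '' - empty args, no '?'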
""" deprecated from BaseHandler ------------------------------------------------------------------------------------
"""
def getImage(self, *args, **kwargs):
return self.get_image(*args, **kwargs)
def get_image(self, *args, **kwargs):
# abstract method
pass
def showPoster(self, show=None, **kwargs):
# test: /showPoster/?show=73141&which=poster_thumb
return self.show_poster(TVidProdid(show)(), **kwargs)
def show_poster(self, *args, **kwargs):
# abstract method
pass
""" deprecated from MainHandler ------------------------------------------------------------------------------------
"""
def episodeView(self, **kwargs):
self.redirect_args('/daily-schedule', exclude=(None, False), **kwargs)
def setHomeLayout(self, *args, **kwargs):
return self.set_layout_view_shows(*args, **kwargs)
def set_layout_view_shows(self, *args, **kwargs):
# abstract method
pass
def setPosterSortBy(self, *args):
return self.set_poster_sortby(*args)
@staticmethod
def set_poster_sortby(*args):
# abstract method
pass
def setPosterSortDir(self, *args):
return self.set_poster_sortdir(*args)
@staticmethod
def set_poster_sortdir(*args):
# abstract method
pass
def setEpisodeViewLayout(self, *args):
return self.set_layout_daily_schedule(*args)
def set_layout_daily_schedule(self, *args):
# abstract method
pass
def toggleEpisodeViewDisplayPaused(self):
return self.toggle_display_paused_daily_schedule()
# completely deprecated for the three way state set_ function
# def toggle_display_paused_daily_schedule(self):
# # abstract method
# pass
def toggle_display_paused_daily_schedule(self):
return self.set_display_paused_daily_schedule(not sickgear.EPISODE_VIEW_DISPLAY_PAUSED)
def set_display_paused_daily_schedule(self, *args, **kwargs):
# abstract method
pass
def setEpisodeViewCards(self, *args, **kwargs):
return self.set_cards_daily_schedule(*args, **kwargs)
def set_cards_daily_schedule(self, *args, **kwargs):
# abstract method
pass
def setEpisodeViewSort(self, *args, **kwargs):
return self.set_sort_daily_schedule(*args, **kwargs)
def set_sort_daily_schedule(self, *args, **kwargs):
# abstract method
pass
def getFooterTime(self, *args, **kwargs):
return self.get_footer_time(*args, **kwargs)
@staticmethod
def get_footer_time(*args, **kwargs):
# abstract method
pass
def toggleDisplayShowSpecials(self, **kwargs):
return self.toggle_specials_view_show(TVidProdid(kwargs.get('show'))())
def toggle_specials_view_show(self, *args):
# abstract method
pass
def setHistoryLayout(self, *args):
return self.set_layout_history(*args)
def set_layout_history(self, *args):
# abstract method
pass
""" deprecated from Home -------------------------------------------------------------------------------------------
"""
def showlistView(self):
self.redirect('/view-shows', permanent=True)
def viewchanges(self):
self.redirect('/home/view-changes', permanent=True)
def displayShow(self, **kwargs):
self.migrate_redir('view-show', **kwargs)
def editShow(self, **kwargs):
kwargs['any_qualities'] = kwargs.pop('anyQualities', None)
kwargs['best_qualities'] = kwargs.pop('bestQualities', None)
kwargs['exceptions_list'] = kwargs.pop('exceptions_list', None)
kwargs['direct_call'] = kwargs.pop('directCall', False)
kwargs['tvinfo_lang'] = kwargs.pop('indexerLang', None)
kwargs['subs'] = kwargs.pop('subtitles', None)
self.migrate_redir('edit-show', **kwargs)
def testRename(self, **kwargs):
self.migrate_redir('rename-media', **kwargs)
def migrate_redir(self, new_url, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
self.redirect_args('/home/%s' % new_url, exclude=(None, False), **kwargs)
def setStatus(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.set_show_status(**kwargs)
def set_show_status(self, **kwargs):
# abstract method
pass
def branchCheckout(self, *args):
return self.branch_checkout(*args)
def branch_checkout(self, *args):
# abstract method
pass
def pullRequestCheckout(self, *args):
return self.pull_request_checkout(*args)
def pull_request_checkout(self, *args):
# abstract method
pass
def display_season(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.season_render(**kwargs)
def season_render(self, **kwargs):
# abstract method
pass
def plotDetails(self, show, *args):
return self.plot_details(TVidProdid(show)(), *args)
@staticmethod
def plot_details(*args):
# abstract method
pass
def sceneExceptions(self, show):
return self.scene_exceptions(TVidProdid(show)())
@staticmethod
def scene_exceptions(*args):
# abstract method
pass
def saveMapping(self, show, **kwargs):
kwargs['m_tvid'] = kwargs.pop('mindexer', 0)
kwargs['m_prodid'] = kwargs.pop('mindexerid', 0)
return self.save_mapping(TVidProdid(show)(), **kwargs)
def save_mapping(self, *args, **kwargs):
# abstract method
pass
def forceMapping(self, show, **kwargs):
return self.force_mapping(TVidProdid(show)(), **kwargs)
@staticmethod
def force_mapping(*args, **kwargs):
# abstract method
pass
def deleteShow(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.delete_show(**kwargs)
def delete_show(self, *args, **kwargs):
# abstract method
pass
def refreshShow(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.refresh_show(**kwargs)
def refresh_show(self, *args, **kwargs):
# abstract method
pass
def updateShow(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.update_show(**kwargs)
def update_show(self, *args, **kwargs):
# abstract method
pass
def subtitleShow(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.subtitle_show(**kwargs)
def subtitle_show(self, *args, **kwargs):
# abstract method
pass
def doRename(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.do_rename(**kwargs)
def do_rename(self, *args, **kwargs):
# abstract method
pass
def episode_search(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.search_episode(**kwargs)
def search_episode(self, *args, **kwargs):
# abstract method
pass
def searchEpisodeSubtitles(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.search_episode_subtitles(**kwargs)
def search_episode_subtitles(self, *args, **kwargs):
# abstract method
pass
def setSceneNumbering(self, **kwargs):
return self.set_scene_numbering(
tvid_prodid={kwargs.pop('indexer', ''): kwargs.pop('show', '')},
for_season=kwargs.get('forSeason'), for_episode=kwargs.get('forEpisode'),
scene_season=kwargs.get('sceneSeason'), scene_episode=kwargs.get('sceneEpisode'),
scene_absolute=kwargs.get('sceneAbsolute'))
@staticmethod
def set_scene_numbering(*args, **kwargs):
# abstract method
pass
def update_emby(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.update_mb(**kwargs)
def update_mb(self, *args, **kwargs):
# abstract method
pass
def search_q_progress(self, **kwargs):
kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
return self.search_q_status(**kwargs)
def search_q_status(self, *args, **kwargs):
# abstract method
pass
""" deprecated from NewHomeAddShows i.e. HomeAddShows --------------------------------------------------------------
"""
def addExistingShows(self, **kwargs):
kwargs['prompt_for_settings'] = kwargs.pop('promptForSettings', None)
self.redirect_args('/add-shows/add-existing-shows', **kwargs)
def addAniDBShow(self, **kwargs):
self.migrate_redir_add_shows('info-anidb', TVINFO_TVDB, **kwargs)
def addIMDbShow(self, **kwargs):
self.migrate_redir_add_shows('info-imdb', TVINFO_IMDB, **kwargs)
def addTraktShow(self, **kwargs):
self.migrate_redir_add_shows('info-trakt', TVINFO_TVDB, **kwargs)
def migrate_redir_add_shows(self, new_url, tvinfo, **kwargs):
prodid = kwargs.pop('indexer_id', None)
if prodid:
kwargs['ids'] = prodid
if TVINFO_TVDB == tvinfo and prodid:
kwargs['ids'] = TVidProdid({tvinfo: prodid})()
kwargs['show_name'] = kwargs.pop('showName', None)
self.redirect_args('/add-shows/%s' % new_url, **kwargs)
def getIndexerLanguages(self):
return self.get_infosrc_languages()
@staticmethod
def get_infosrc_languages():
# abstract method
pass
def searchIndexersForShowName(self, *args, **kwargs):
return self.search_tvinfo_for_showname(*args, **kwargs)
def search_tvinfo_for_showname(self, *args, **kwargs):
# abstract method
pass
def massAddTable(self, **kwargs):
return self.mass_add_table(
root_dir=kwargs.pop('rootDir', None), **kwargs)
def mass_add_table(self, *args, **kwargs):
# abstract method
pass
def addNewShow(self, **kwargs):
return self.add_new_show(
provided_tvid=kwargs.pop('providedIndexer', None),
which_series=kwargs.pop('whichSeries', None),
tvinfo_lang=kwargs.pop('indexerLang', 'en'),
root_dir=kwargs.pop('rootDir', None),
default_status=kwargs.pop('defaultStatus', None),
any_qualities=kwargs.pop('anyQualities', None),
best_qualities=kwargs.pop('bestQualities', None),
subs=kwargs.pop('subtitles', None),
full_show_path=kwargs.pop('fullShowPath', None),
skip_show=kwargs.pop('skipShow', None),
**kwargs)
def add_new_show(self, *args, **kwargs):
# abstract method
pass
""" deprecated from ConfigGeneral ----------------------------------------------------------------------------------
"""
def generateKey(self):
return self.generate_key()
@staticmethod
def generate_key():
# abstract method
pass
def saveRootDirs(self, **kwargs):
return self.save_root_dirs(root_dir_string=kwargs.get('rootDirString'))
@staticmethod
def save_root_dirs(**kwargs):
# abstract method
pass
def saveResultPrefs(self, **kwargs):
return self.save_result_prefs(**kwargs)
@staticmethod
def save_result_prefs(**kwargs):
# abstract method
pass
def saveAddShowDefaults(self, *args, **kwargs):
return self.save_add_show_defaults(*args, **kwargs)
@staticmethod
def save_add_show_defaults(*args, **kwargs):
# abstract method
pass
def saveGeneral(self, **kwargs):
return self.save_general(**kwargs)
def save_general(self, **kwargs):
# abstract method
pass
""" deprecated from ConfigSearch -----------------------------------------------------------------------------------
"""
def saveSearch(self, **kwargs):
return self.save_search(**kwargs)
def save_search(self, **kwargs):
# abstract method
pass
""" deprecated from ConfigProviders --------------------------------------------------------------------------------
"""
def canAddNewznabProvider(self, *args):
return self.can_add_newznab_provider(*args)
@staticmethod
def can_add_newznab_provider(*args):
# abstract method
pass
def getNewznabCategories(self, *args):
return self.get_newznab_categories(*args)
@staticmethod
def get_newznab_categories(*args):
# abstract method
pass
def canAddTorrentRssProvider(self, *args):
return self.can_add_torrent_rss_provider(*args)
@staticmethod
def can_add_torrent_rss_provider(*args):
# abstract method
pass
def checkProvidersPing(self):
return self.check_providers_ping()
@staticmethod
def check_providers_ping():
# abstract method
pass
def saveProviders(self, *args, **kwargs):
return self.save_providers(*args, **kwargs)
def save_providers(self, *args, **kwargs):
# abstract method
pass
""" deprecated from ConfigPostProcessing ---------------------------------------------------------------------------
"""
def savePostProcessing(self, **kwargs):
return self.save_post_processing(**kwargs)
def save_post_processing(self, **kwargs):
# abstract method
pass
def testNaming(self, *args, **kwargs):
return self.test_naming(*args, **kwargs)
@staticmethod
def test_naming(*args, **kwargs):
# abstract method
pass
def isNamingValid(self, *args, **kwargs):
return self.is_naming_valid(*args, **kwargs)
@staticmethod
def is_naming_valid(*args, **kwargs):
# abstract method
pass
def isRarSupported(self):
return self.is_rar_supported()
@staticmethod
def is_rar_supported():
# abstract method
pass
""" deprecated from ConfigSubtitles --------------------------------------------------------------------------------
"""
def saveSubtitles(self, **kwargs):
return self.save_subtitles(**kwargs)
def save_subtitles(self, **kwargs):
# abstract method
pass
""" deprecated from ConfigAnime ------------------------------------------------------------------------------------
"""
def saveAnime(self, **kwargs):
return self.save_anime(**kwargs)
def save_anime(self, **kwargs):
# abstract method
pass
""" deprecated from Manage -----------------------------------------------------------------------------------------
"""
def episode_statuses(self, **kwargs):
self.redirect_args('/manage/episode-overview', **kwargs)
def subtitleMissed(self, **kwargs):
kwargs['which_subs'] = kwargs.pop('whichSubs', None)
self.redirect_args('/manage/subtitle_missed', **kwargs)
def show_episode_statuses(self, **kwargs):
return self.get_status_episodes(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('which_status'))
@staticmethod
def get_status_episodes(*args):
# abstract method
pass
def showSubtitleMissed(self, **kwargs):
return self.show_subtitle_missed(TVidProdid(kwargs.get('indexer_id'))(), kwargs.get('whichSubs'))
@staticmethod
def show_subtitle_missed(*args):
# abstract method
pass
def downloadSubtitleMissed(self, **kwargs):
return self.download_subtitle_missed(**kwargs)
def download_subtitle_missed(self, **kwargs):
# abstract method
pass
def backlogShow(self, **kwargs):
return self.backlog_show(TVidProdid(kwargs.get('indexer_id'))())
def backlog_show(self, *args):
# abstract method
pass
def backlogOverview(self):
self.redirect('/manage/backlog_overview', permanent=True)
def massEdit(self, **kwargs):
return self.mass_edit(to_edit=kwargs.get('toEdit'))
def mass_edit(self, **kwargs):
# abstract method
pass
def massEditSubmit(self, **kwargs):
kwargs['to_edit'] = kwargs.pop('toEdit', None)
kwargs['subs'] = kwargs.pop('subtitles', None)
kwargs['any_qualities'] = kwargs.pop('anyQualities', None)
kwargs['best_qualities'] = kwargs.pop('bestQualities', None)
return self.mass_edit_submit(**kwargs)
def mass_edit_submit(self, **kwargs):
# abstract method
pass
def bulkChange(self, **kwargs):
return self.bulk_change(
to_update=kwargs.get('toUpdate'), to_refresh=kwargs.get('toRefresh'),
to_rename=kwargs.get('toRename'), to_delete=kwargs.get('toDelete'), to_remove=kwargs.get('toRemove'),
to_metadata=kwargs.get('toMetadata'), to_subtitle=kwargs.get('toSubtitle'))
def bulk_change(self, **kwargs):
# abstract method
pass
def failedDownloads(self, **kwargs):
kwargs['to_remove'] = kwargs.pop('toRemove', None)
return self.failed_downloads(**kwargs)
def failed_downloads(self, **kwargs):
# abstract method
pass
""" deprecated from ManageSearches ---------------------------------------------------------------------------------
"""
def retryProvider(self, **kwargs):
return self.retry_provider(**kwargs)
@staticmethod
def retry_provider(**kwargs):
# abstract method
pass
def forceVersionCheck(self):
return self.check_update()
def check_update(self):
# abstract method
pass
def forceBacklog(self):
return self.force_backlog()
def force_backlog(self):
# abstract method
pass
def forceSearch(self):
return self.force_search()
def force_search(self):
# abstract method
pass
def forceFindPropers(self):
return self.force_find_propers()
def force_find_propers(self):
# abstract method
pass
def pauseBacklog(self, **kwargs):
return self.pause_backlog(**kwargs)
def pause_backlog(self, **kwargs):
# abstract method
pass
""" deprecated from ShowProcesses ----------------------------------------------------------------------------------
"""
def forceShowUpdate(self):
return self.force_show_update()
def force_show_update(self):
# abstract method
pass
""" deprecated from History ----------------------------------------------------------------------------------------
"""
def clearHistory(self):
return self.clear_history()
def clear_history(self):
# abstract method
pass
def trimHistory(self):
return self.trim_history()
def trim_history(self):
# abstract method
pass
""" deprecated from ErrorLogs --------------------------------------------------------------------------------------
"""
def clearerrors(self):
self.redirect('/errors/clear-log')
def viewlog(self, **kwargs):
self.redirect_args('/events/view-log/', **kwargs)
def downloadlog(self):
return self.download_log()
def download_log(self):
# abstract method
pass
""" ------------------------------------------------------------------------------------------------------------ """
""" ------------------------------------------------------------------------------------------------------------ """
""" end of base deprecated function stubs """
""" ------------------------------------------------------------------------------------------------------------ """
""" ------------------------------------------------------------------------------------------------------------ """
class LegacyRouteHandler(RequestHandler):
def data_received(self, *args):
pass
def __init__(self, *arg, **kwargs):
super(LegacyRouteHandler, self).__init__(*arg, **kwargs)
self.lock = threading.Lock()
def set_default_headers(self):
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
if sickgear.SEND_SECURITY_HEADERS:
self.set_header('X-Frame-Options', 'SAMEORIGIN')
# noinspection PyUnusedLocal
@gen.coroutine
def get(self, *args, **kwargs):
getattr(self, 'index')()
def redirect(self, url, permanent=False, status=None):
if not url.startswith(sickgear.WEB_ROOT):
url = sickgear.WEB_ROOT + url
super(LegacyRouteHandler, self).redirect(url, permanent, status)
class LegacyManageManageSearches(LegacyRouteHandler):
""" deprecated from ManageSearches ---------------------------------------------------------------------------------
"""
def index(self):
self.redirect('/manage/search-tasks/', permanent=True)
class LegacyManageShowProcesses(LegacyRouteHandler):
""" deprecated from ManageShowProcesses ----------------------------------------------------------------------------
"""
def index(self):
self.redirect('/manage/show-tasks/', permanent=True)
class LegacyConfigPostProcessing(LegacyRouteHandler):
""" deprecated from ConfigPostProcessing ---------------------------------------------------------------------------
"""
def index(self):
self.redirect('/config/media-process/', permanent=True)
class LegacyHomeAddShows(LegacyRouteHandler):
""" deprecated from NewHomeAddShows i.e. HomeAddShows --------------------------------------------------------------
"""
def index(self):
self.redirect('/add-shows/', permanent=True)
class LegacyErrorLogs(LegacyRouteHandler):
""" deprecated from ErrorLogs --------------------------------------------------------------------------------------
"""
def index(self):
self.redirect('/events/', permanent=True)
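All of the deprecated stubs above share one shape: a legacy camelCase route converts its old-style arguments (for example, folding `show` into a `tvid_prodid`) and hands off to a snake_case method that the concrete handler overrides. A minimal sketch of that delegation pattern, using hypothetical `fooBar`/`foo_bar` names:

class DeprecationPatternSketch(object):
    def fooBar(self, **kwargs):  # hypothetical legacy route name
        # translate the legacy argument, then delegate to the new-style method
        kwargs['tvid_prodid'] = TVidProdid(kwargs.pop('show', ''))()
        return self.foo_bar(**kwargs)

    def foo_bar(self, *args, **kwargs):
        # abstract method; the real handler subclass implements the behaviour
        pass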

View file

@ -46,7 +46,7 @@ def get_win_drives():
def folders_at_path(path, include_parent=False, include_files=False):
""" Returns a list of dictionaries with the folders contained at the given path
Give the empty string as the path to list the contents of the root path
under Unix this means "/", on Windows this will be a list of drive letters)
under Unix this means "/", (on Windows this will be a list of drive letters)
"""
# walk up the tree until we find a valid path
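The corrected docstring above also states the contract: pass an empty string to list the filesystem roots. A hedged usage sketch (the exact return shape is assumed, not taken from this diff):

entries = folders_at_path('')                               # '/' on Unix, drive letters on Windows
entries = folders_at_path('/srv/tv', include_parent=True)   # adds a parent ('..') entry
# each entry is assumed to be a small dict such as {'name': ..., 'path': ...}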

View file

@ -25,7 +25,7 @@ import sickgear
from ._legacy_classes import LegacySearchResult, LegacyProper
from .common import Quality
from six import integer_types, iteritems, PY2, string_types
from six import integer_types, iteritems, string_types
# noinspection PyUnreachableCode
if False:
@ -155,7 +155,7 @@ class SearchResult(LegacySearchResult):
class NZBSearchResult(SearchResult):
"""
Regular NZB result with an URL to the NZB
Regular NZB result with a URL to the NZB
"""
resultType = 'nzb'
@ -169,7 +169,7 @@ class NZBDataSearchResult(SearchResult):
class TorrentSearchResult(SearchResult):
"""
Torrent result with an URL to the torrent
Torrent result with a URL to the torrent
"""
resultType = 'torrent'
@ -359,41 +359,11 @@ class OrderedDefaultdict(OrderedDict):
args = (self.default_factory,) if self.default_factory else ()
return self.__class__, args, None, None, iteritems(self)
if PY2:
# backport from python 3
def move_to_end(self, key, last=True):
"""Move an existing element to the end (or beginning if last==False).
def first_key(self):
return next(iter(self))
Raises KeyError if the element does not exist.
When last=True, acts like a fast version of self[key]=self.pop(key).
"""
link_prev, link_next, key = link = getattr(self, '_OrderedDict__map')[key]
link_prev[1] = link_next
link_next[0] = link_prev
root = getattr(self, '_OrderedDict__root')
if last:
last = root[0]
link[0] = last
link[1] = root
last[1] = root[0] = link
else:
first = root[1]
link[0] = root
link[1] = first
root[1] = first[0] = link
def first_key(self):
return getattr(self, '_OrderedDict__root')[1][2]
def last_key(self):
return getattr(self, '_OrderedDict__root')[0][2]
else:
def first_key(self):
return next(iter(self))
def last_key(self):
return next(reversed(self))
def last_key(self):
return next(reversed(self))
class ImageUrlList(list):
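With the py2 `move_to_end` backport deleted, `first_key` and `last_key` reduce to the one-liners above. A minimal sketch of what they return on any ordered mapping:

from collections import OrderedDict

d = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
next(iter(d))        # 'a' -- the py3 body of first_key()
next(reversed(d))    # 'c' -- the py3 body of last_key()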
@ -455,61 +425,14 @@ class EnvVar(object):
pass
def __getitem__(self, key):
return os.environ(key)
return os.environ[key]
@staticmethod
def get(key, default=None):
return os.environ.get(key, default)
if not PY2:
sickgear.ENV = EnvVar()
elif 'nt' == os.name:
from ctypes import windll, create_unicode_buffer
# noinspection PyCompatibility
class WinEnvVar(EnvVar):
@staticmethod
def get_environment_variable(name):
# noinspection PyUnresolvedReferences
name = unicode(name) # ensures string argument is unicode
n = windll.kernel32.GetEnvironmentVariableW(name, None, 0)
env_value = None
if n:
buf = create_unicode_buffer(u'\0' * n)
windll.kernel32.GetEnvironmentVariableW(name, buf, n)
env_value = buf.value
return env_value
def __getitem__(self, key):
return self.get_environment_variable(key)
def get(self, key, default=None):
r = self.get_environment_variable(key)
return r if None is not r else default
sickgear.ENV = WinEnvVar()
else:
# noinspection PyCompatibility
class LinuxEnvVar(EnvVar):
# noinspection PyMissingConstructor
def __init__(self, environ):
self.environ = environ
def __getitem__(self, key):
v = self.environ.get(key)
try:
return v if not isinstance(v, str) else v.decode(sickgear.SYS_ENCODING)
except (UnicodeDecodeError, UnicodeEncodeError):
return v
def get(self, key, default=None):
v = self[key]
return v if None is not v else default
sickgear.ENV = LinuxEnvVar(os.environ)
sickgear.ENV = EnvVar()
# backport from python 3
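Dropping py2 also collapses the Windows- and Linux-specific environment wrappers into the single `EnvVar` above, which simply defers to `os.environ`. A minimal usage sketch (the variable name is illustrative):

sickgear.ENV = EnvVar()
path = sickgear.ENV['PATH']                        # KeyError if unset, like os.environ
data_dir = sickgear.ENV.get('SG_DATA', '/opt/sg')  # 'SG_DATA' is a made-up example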
@ -533,7 +456,7 @@ class SimpleNamespace(object):
# list that supports weak references
class weakList(list):
class WeakList(list):
__slots__ = ('__weakref__',)

View file

@ -26,7 +26,7 @@ from .. import logger
from ..sgdatetime import timestamp_near
import sickgear
from _23 import filter_iter, filter_list, map_list, unquote_plus
from _23 import unquote_plus
from six import string_types
# noinspection PyUnreachableCode
@ -96,21 +96,21 @@ class DownloadStationAPI(GenericClient):
id=t['id'], title=t['title'], total_size=t.get('size') or 0,
added_ts=d.get('create_time'), last_completed_ts=d.get('completed_time'),
last_started_ts=d.get('started_time'), seed_elapsed_secs=d.get('seedelapsed'),
wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None,
wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None,
wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None,
wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None,
tally_down=downloaded(tx),
tally_up=tx.get('size_uploaded'),
state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(filter_list(
lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f))]
state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(list(filter(
lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f)))]
))
# only available during "download" and "seeding"
file_list = (lambda t: t.get('additional', {}).get('file', {}))
valid_stat = (lambda ti: not ti.get('error') and isinstance(ti.get('status'), string_types)
and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))
result = map_list(lambda t: base_state(
and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))))
result = list(map(lambda t: base_state(
t, t.get('additional', {}).get('detail', {}), t.get('additional', {}).get('transfer', {}), file_list(t)),
filter_list(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t),
tasks))
list(filter(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t),
tasks))))
return result
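The hunks above show the mechanical core of this cleanup: the `_23` compatibility helpers are swapped for py3 builtins. The equivalences, as a minimal sketch:

xs = [1, 2, 3]
list(map(lambda x: 2 * x, xs))     # replaces map_list(fn, xs)
list(filter(lambda x: 1 < x, xs))  # replaces filter_list(pred, xs)
filter(lambda x: 1 < x, xs)        # replaces filter_iter(pred, xs): a lazy iterator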
@ -133,13 +133,13 @@ class DownloadStationAPI(GenericClient):
t_params=dict(additional='detail,file,transfer'))['data']['tasks']
else:
# noinspection PyUnresolvedReferences
tasks = (filter_list(lambda d: d.get('id') == rid, self._testdata), self._testdata)[not rid]
tasks = (list(filter(lambda d: d.get('id') == rid, self._testdata)), self._testdata)[not rid]
result += tasks and (isinstance(tasks, list) and tasks or (isinstance(tasks, dict) and [tasks])) \
or ([], [{'error': True, 'id': rid}])[err]
except (BaseException, Exception):
if getinfo:
result += [dict(error=True, id=rid)]
for t in filter_iter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result):
for t in filter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result):
t['title'] = unquote_plus(t.get('title'))
return result
@ -164,8 +164,8 @@ class DownloadStationAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Pause item(s)
:param ids: Id(s) to pause
:return: True/Falsy if success/failure else Id(s) that failed to be paused
:param ids: ID(s) to pause
:return: True/Falsy if success/failure else ID(s) that failed to be paused
"""
return self._action(
'pause', ids,
@ -177,8 +177,8 @@ class DownloadStationAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Resume task(s) in client
:param ids: Id(s) to act on
:return: True if success, Id(s) that could not be resumed, else Falsy if failure
:param ids: ID(s) to act on
:return: True if success, ID(s) that could not be resumed, else Falsy if failure
"""
return self._perform_task(
'resume', ids,
@ -190,8 +190,8 @@ class DownloadStationAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Delete task(s) from client
:param ids: Id(s) to act on
:return: True if success, Id(s) that could not be deleted, else Falsy if failure
:param ids: ID(s) to act on
:return: True if success, ID(s) that could not be deleted, else Falsy if failure
"""
return self._perform_task(
'delete', ids,
@ -205,13 +205,13 @@ class DownloadStationAPI(GenericClient):
"""
Set up and send a method to client
:param method: Either `resume` or `delete`
:param ids: Id(s) to perform method on
:param ids: ID(s) to perform method on
:param filter_func: Callback function to filter tasks as failed or erroneous
:param pause_first: True if task should be paused prior to invoking method
:return: True if success, Id(s) that could not be acted upon, else Falsy if failure
:return: True if success, ID(s) that could not be acted upon, else Falsy if failure
"""
if isinstance(ids, (string_types, list)):
rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(','))
rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(',')))
result = pause_first and self._pause_torrent(rids) # get items not paused
result = (isinstance(result, list) and result or [])
@ -225,7 +225,7 @@ class DownloadStationAPI(GenericClient):
if isinstance(ids, (string_types, list)):
item = dict(fail=[], ignore=[])
for task in filter_iter(filter_func, self._tinf(ids, err=True)):
for task in filter(filter_func, self._tinf(ids, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')]
# retry items not acted on
@ -237,7 +237,7 @@ class DownloadStationAPI(GenericClient):
logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG)
time.sleep(i)
item['fail'] = []
for task in filter_iter(filter_func, self._tinf(retry_ids, err=True)):
for task in filter(filter_func, self._tinf(retry_ids, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')]
if not item['fail']:
@ -256,7 +256,7 @@ class DownloadStationAPI(GenericClient):
"""
Add magnet to client (overridden class function)
:param search_result: A populated search result object
:return: Id of task in client, True if added but no ID, else Falsy if nothing added
:return: ID of task in client, True if added but no ID, else Falsy if nothing added
"""
if 3 <= self._task_version:
return self._add_torrent(uri={'uri': search_result.url})
@ -269,7 +269,7 @@ class DownloadStationAPI(GenericClient):
"""
Add file to client (overridden class function)
:param search_result: A populated search result object
:return: Id of task in client, True if added but no ID, else Falsy if nothing added
:return: ID of task in client, True if added but no ID, else Falsy if nothing added
"""
return self._add_torrent(
files={'file': ('%s.torrent' % re.sub(r'(\.torrent)+$', '', search_result.name), search_result.content)})
@ -280,7 +280,7 @@ class DownloadStationAPI(GenericClient):
Create client task
:param uri: URI param for client API
:param files: file param for client API
:return: Id of task in client, True if created but no id found, else Falsy if nothing created
:return: ID of task in client, True if created but no id found, else Falsy if nothing created
"""
if self._testmode:
# noinspection PyUnresolvedReferences
@ -303,7 +303,7 @@ class DownloadStationAPI(GenericClient):
# noinspection PyUnresolvedReferences
if response and response.get('success'):
for s in (1, 3, 5, 10, 15, 30, 60):
tasks = filter_list(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf())
tasks = list(filter(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf()))
try:
return str(self._client_has(tasks, uri, files)[0].get('id'))
except IndexError:
@ -324,8 +324,8 @@ class DownloadStationAPI(GenericClient):
if uri or files:
u = isinstance(uri, dict) and (uri.get('uri', '') or '').lower() or None
f = isinstance(files, dict) and (files.get('file', [''])[0]).lower() or None
result = filter_list(lambda t: u and t['additional']['detail']['uri'].lower() == u
or f and t['additional']['detail']['uri'].lower() in f, tasks)
result = list(filter(lambda t: u and t['additional']['detail']['uri'].lower() == u
or f and t['additional']['detail']['uri'].lower() in f, tasks))
return result
def _client_request(self, method, t_id=None, t_params=None, files=None):
@ -360,7 +360,7 @@ class DownloadStationAPI(GenericClient):
return self._error_task(response)
if None is not t_id and None is t_params and 'create' != method:
return filter_list(lambda r: r.get('error'), response.get('data', {})) or True
return list(filter(lambda r: r.get('error'), response.get('data', {}))) or True
return response
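The return convention documented in these docstrings is tri-state: `True` on full success, a list of IDs that failed, or a falsy value when the request itself failed. A hedged sketch of how a caller might branch on it (names are illustrative):

outcome = client.resume_torrent(['id1', 'id2'])   # illustrative call
if True is outcome:
    pass                        # every task was acted on
elif isinstance(outcome, list):
    retry_later(outcome)        # illustrative helper: IDs that could not be resumed
else:
    report_failure()            # illustrative helper: the request itself failed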

View file

@ -129,7 +129,7 @@ class GenericClient(object):
def _add_torrent_file(self, result):
"""
This should be overridden to return the True/False from the client
when a torrent is added via result.content (only .torrent file)
when a torrent is added via `result.content` (only .torrent file)
"""
return False
@ -179,9 +179,9 @@ class GenericClient(object):
"""
This should be overridden to resume task(s) in client
:param ids: Id(s) to act on
:param ids: ID(s) to act on
:type ids: list or string
:return: True if success, Id(s) that could not be resumed, else Falsy if failure
:return: True if success, ID(s) that could not be resumed, else Falsy if failure
:rtype: bool or list
"""
return False
@ -189,9 +189,9 @@ class GenericClient(object):
def _delete_torrent(self, ids):
"""
This should be overridden to delete task(s) from client
:param ids: Id(s) to act on
:param ids: ID(s) to act on
:type ids: list or string
:return: True if success, Id(s) that could not be deleted, else Falsy if failure
:return: True if success, ID(s) that could not be deleted, else Falsy if failure
:rtype: bool or list
"""
return False
@ -200,7 +200,7 @@ class GenericClient(object):
def _get_torrent_hash(result):
if result.url.startswith('magnet'):
result.hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
result.hash = re.findall(r'urn:btih:(\w{32,40})', result.url)[0]
if 32 == len(result.hash):
result.hash = make_btih(result.hash).lower()
else:
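The tightened regex above pulls the info hash out of a magnet link; a 32-character match is base32 and is re-encoded to the canonical 40-character hex digest (the job of the project's `make_btih` helper). A minimal stdlib sketch of the same conversion:

import base64
import re

magnet = 'magnet:?xt=urn:btih:ABCDEFGHIJKLMNOPQRSTUVWXYZ234567&dn=example'
info_hash = re.findall(r'urn:btih:(\w{32,40})', magnet)[0]
if 32 == len(info_hash):
    # base32 form: 32 chars decode to 20 bytes, i.e. a 40-char hex digest
    info_hash = base64.b32decode(info_hash).hex()
info_hash = info_hash.lower()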

View file

@ -26,7 +26,7 @@ import sickgear
from requests.exceptions import HTTPError
from _23 import filter_iter, filter_list, map_list, unquote_plus
from _23 import unquote_plus
from six import string_types
# noinspection PyUnreachableCode
@ -58,9 +58,9 @@ class QbittorrentAPI(GenericClient):
id=t['hash'], title=t['name'], total_size=gp.get('total_size') or 0,
added_ts=gp.get('addition_date'), last_completed_ts=gp.get('completion_date'),
last_started_ts=None, seed_elapsed_secs=gp.get('seeding_time'),
wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None,
wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None,
tally_down=sum(map_list(lambda tf: downloaded(tf) or 0, f)) or None,
wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None,
wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None,
tally_down=sum(list(map(lambda tf: downloaded(tf) or 0, f))) or None,
tally_up=gp.get('total_uploaded'),
state='done' if 'pausedUP' == t.get('state') else ('down', 'seed')['up' in t.get('state').lower()]
))
@ -68,10 +68,10 @@ class QbittorrentAPI(GenericClient):
('torrents/files', 'query/propertiesFiles/%s' % ti['hash'])[not self.api_ns],
params=({'hash': ti['hash']}, {})[not self.api_ns], json=True) or {})
valid_stat = (lambda ti: not self._ignore_state(ti)
and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))
result = map_list(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)),
filter_list(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and
valid_stat(t), self._tinf(ids, False)))
and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))))
result = list(map(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)),
list(filter(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and
valid_stat(t), self._tinf(ids, False)))))
return result
@ -109,8 +109,7 @@ class QbittorrentAPI(GenericClient):
except (BaseException, Exception):
if getinfo:
result += [dict(error=True, id=rid)]
for t in filter_iter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'),
(result, [])[getinfo]):
for t in filter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'), (result, [])[getinfo]):
t['name'] = unquote_plus(t.get('name'))
return result
@ -148,7 +147,7 @@ class QbittorrentAPI(GenericClient):
"""
Set maximal priority in queue to torrent task
:param ids: ID(s) to promote
:return: True/Falsy if success/failure else Id(s) that failed to be changed
:return: True/Falsy if success/failure else ID(s) that failed to be changed
"""
def _maxpri_filter(t):
mark_fail = True
@ -180,7 +179,7 @@ class QbittorrentAPI(GenericClient):
"""
Set label/category to torrent task
:param ids: ID(s) to change
:return: True/Falsy if success/failure else Id(s) that failed to be changed
:return: True/Falsy if success/failure else ID(s) that failed to be changed
"""
def _label_filter(t):
mark_fail = True
@ -206,8 +205,8 @@ class QbittorrentAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Pause item(s)
:param ids: Id(s) to pause
:return: True/Falsy if success/failure else Id(s) that failed to be paused
:param ids: ID(s) to pause
:return: True/Falsy if success/failure else ID(s) that failed to be paused
"""
def _pause_filter(t):
mark_fail = True
@ -253,8 +252,8 @@ class QbittorrentAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Resume task(s) in client
:param ids: Id(s) to act on
:return: True if success, Id(s) that could not be resumed, else Falsy if failure
:param ids: ID(s) to act on
:return: True if success, ID(s) that could not be resumed, else Falsy if failure
"""
return self._perform_task(
'resume', ids,
@ -268,8 +267,8 @@ class QbittorrentAPI(GenericClient):
# type: (Union[AnyStr, list]) -> Union[bool, list]
"""
Delete task(s) from client
:param ids: Id(s) to act on
:return: True if success, Id(s) that could not be deleted, else Falsy if failure
:param ids: ID(s) to act on
:return: True if success, ID(s) that could not be deleted, else Falsy if failure
"""
return self._perform_task(
'delete', ids,
@ -284,13 +283,13 @@ class QbittorrentAPI(GenericClient):
"""
Set up and send a method to client
:param method: Either `resume` or `delete`
:param ids: Id(s) to perform method on
:param ids: ID(s) to perform method on
:param filter_func: Callback function passed to _action that will filter tasks as failed or erroneous
:param pause_first: True if task should be paused prior to invoking method
:return: True if success, Id(s) that could not be acted upon, else Falsy if failure
:return: True if success, ID(s) that could not be acted upon, else Falsy if failure
"""
if isinstance(ids, (string_types, list)):
rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(','))
rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(',')))
result = pause_first and self._pause_torrent(rids) # get items not paused
result = (isinstance(result, list) and result or [])
@ -304,7 +303,7 @@ class QbittorrentAPI(GenericClient):
if isinstance(ids, (string_types, list)):
item = dict(fail=[], ignore=[])
for task in filter_iter(filter_func, self._tinf(ids, use_props=False, err=True)):
for task in filter(filter_func, self._tinf(ids, use_props=False, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')]
# retry items that are not acted on
@ -316,7 +315,7 @@ class QbittorrentAPI(GenericClient):
logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG)
time.sleep(i)
item['fail'] = []
for task in filter_iter(filter_func, self._tinf(retry_ids, use_props=False, err=True)):
for task in filter(filter_func, self._tinf(retry_ids, use_props=False, err=True)):
item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')]
if not item['fail']:
@ -378,7 +377,7 @@ class QbittorrentAPI(GenericClient):
if True is response:
for s in (1, 3, 5, 10, 15, 30, 60):
if filter_list(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash)):
if list(filter(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash))):
return data.hash
time.sleep(s)
return True
@ -396,7 +395,7 @@ class QbittorrentAPI(GenericClient):
"""
Send a request to client
:param cmd: Api task to invoke
:param kwargs: keyword arguments to pass thru to helpers getURL function
:param kwargs: keyword arguments to pass through to helpers getURL function
:return: JSON decoded response dict, True if success and no response body, Text error or None if failure
"""
authless = bool(re.search('(?i)login|version', cmd))

View file

@ -90,7 +90,7 @@ class RtorrentAPI(GenericClient):
# try:
# if ratio > 0:
#
# # Explicitly set all group options to ensure it is setup correctly
# # Explicitly set all group options to ensure it is set up correctly
# group.set_upload('1M')
# group.set_min(ratio)
# group.set_max(ratio)

View file

@ -84,7 +84,7 @@ class TransmissionAPI(GenericClient):
def _add_torrent(self, t_object):
# populate blankable and download_dir
# populate blanked and download_dir
if not self._get_auth():
logger.log('%s: Authentication failed' % self.name, logger.ERROR)
return False

View file

@ -24,17 +24,17 @@ from _23 import urlencode
from six import iteritems
class uTorrentAPI(GenericClient):
class UtorrentAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
super(uTorrentAPI, self).__init__('uTorrent', host, username, password)
super(UtorrentAPI, self).__init__('uTorrent', host, username, password)
self.url = self.host + 'gui/'
def _request(self, method='get', params=None, files=None, **kwargs):
params = {} if None is params else params
return super(uTorrentAPI, self)._request(
return super(UtorrentAPI, self)._request(
method=method,
params='token={0:s}&{1:s}'.format(self.auth, '&'.join(
['%s' % urlencode(dict([[key, str(value)]]))
@ -128,4 +128,4 @@ class uTorrentAPI(GenericClient):
return self._request(params=params)
api = uTorrentAPI()
api = UtorrentAPI()

View file

@ -25,7 +25,6 @@ import uuid
import sickgear
from _23 import map_list
from six import integer_types, iterkeys, string_types
# noinspection PyUnresolvedReferences
@ -180,7 +179,7 @@ class Quality(object):
return Quality.qualityStrings[quality].replace('SD DVD', 'SD DVD/BR/BD')
@staticmethod
def _getStatusStrings(status):
def _get_status_strings(status):
"""
:param status: status
@ -188,14 +187,14 @@ class Quality(object):
:return:
:rtype: AnyStr
"""
toReturn = {}
to_return = {}
for _x in Quality.qualityStrings:
toReturn[Quality.compositeStatus(status, _x)] = '%s (%s)' % (
to_return[Quality.composite_status(status, _x)] = '%s (%s)' % (
Quality.statusPrefixes[status], Quality.qualityStrings[_x])
return toReturn
return to_return
@staticmethod
def combineQualities(any_qualities, best_qualities):
def combine_qualities(any_qualities, best_qualities):
# type: (List[int], List[int]) -> int
"""
@ -211,7 +210,7 @@ class Quality(object):
return any_quality | (best_quality << 16)
@staticmethod
def splitQuality(quality):
def split_quality(quality):
# type: (int) -> Tuple[List[int], List[int]]
"""
@ -228,10 +227,10 @@ class Quality(object):
return sorted(any_qualities), sorted(best_qualities)
@staticmethod
def nameQuality(name, anime=False):
def name_quality(name, anime=False):
"""
Return the quality from an episode file renamed by SickGear
If no quality is achieved it will try sceneQuality regex
If no quality is achieved it will try scene_quality regex
:param name: name
:type name: AnyStr
:param anime: is anime
@ -248,7 +247,7 @@ class Quality(object):
continue
if Quality.NONE == _x: # Last chance
return Quality.sceneQuality(name, anime)
return Quality.scene_quality(name, anime)
regex = r'\W' + Quality.qualityStrings[_x].replace(' ', r'\W') + r'\W'
regex_match = re.search(regex, name, re.I)
@ -256,7 +255,7 @@ class Quality(object):
return _x
@staticmethod
def sceneQuality(name, anime=False):
def scene_quality(name, anime=False):
"""
Return the quality from the scene episode file
:param name: name
@ -347,7 +346,7 @@ class Quality(object):
return Quality.UNKNOWN
@staticmethod
def fileQuality(filename):
def file_quality(filename):
"""
:param filename: filename
@ -406,7 +405,7 @@ class Quality(object):
return Quality.UNKNOWN
@staticmethod
def assumeQuality(name):
def assume_quality(name):
"""
:param name: name
@ -421,7 +420,7 @@ class Quality(object):
return Quality.UNKNOWN
@staticmethod
def compositeStatus(status, quality):
def composite_status(status, quality):
"""
:param status: status
@ -434,7 +433,7 @@ class Quality(object):
return status + 100 * quality
@staticmethod
def qualityDownloaded(status):
def quality_downloaded(status):
# type: (int) -> int
"""
@ -446,7 +445,7 @@ class Quality(object):
return (status - DOWNLOADED) // 100
@staticmethod
def splitCompositeStatus(status):
def split_composite_status(status):
# type: (int) -> Tuple[int, int]
"""Returns a tuple containing (status, quality)
:param status: status
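The composite encoding in this block is plain arithmetic: `composite_status(status, quality)` is `status + 100 * quality`, and `quality_downloaded` inverts it with `(status - DOWNLOADED) // 100`. A worked sketch with assumed constant values:

# assuming, for illustration only, DOWNLOADED == 4 and Quality.HDTV == 8
composite = 4 + 100 * 8      # composite_status(DOWNLOADED, Quality.HDTV) -> 804
quality = (804 - 4) // 100   # quality_downloaded(804) -> 8
status = 804 % 100           # -> 4; the bare status, since status codes stay below 100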
@ -461,7 +460,7 @@ class Quality(object):
return status, Quality.NONE
@staticmethod
def statusFromName(name, assume=True, anime=False):
def status_from_name(name, assume=True, anime=False):
"""
:param name: name
@ -473,13 +472,13 @@ class Quality(object):
:return:
:rtype: int or long
"""
quality = Quality.nameQuality(name, anime)
quality = Quality.name_quality(name, anime)
if assume and Quality.UNKNOWN == quality:
quality = Quality.assumeQuality(name)
return Quality.compositeStatus(DOWNLOADED, quality)
quality = Quality.assume_quality(name)
return Quality.composite_status(DOWNLOADED, quality)
@staticmethod
def statusFromNameOrFile(file_path, assume=True, anime=False):
def status_from_name_or_file(file_path, assume=True, anime=False):
"""
:param file_path: file path
@ -491,12 +490,12 @@ class Quality(object):
:return:
:rtype: int or long
"""
quality = Quality.nameQuality(file_path, anime)
quality = Quality.name_quality(file_path, anime)
if Quality.UNKNOWN == quality:
quality = Quality.fileQuality(file_path)
quality = Quality.file_quality(file_path)
if assume and Quality.UNKNOWN == quality:
quality = Quality.assumeQuality(file_path)
return Quality.compositeStatus(DOWNLOADED, quality)
quality = Quality.assume_quality(file_path)
return Quality.composite_status(DOWNLOADED, quality)
SNATCHED = None
SNATCHED_PROPER = None
@ -516,7 +515,7 @@ class WantedQualities(dict):
super(WantedQualities, self).__init__(**kwargs)
def _generate_wantedlist(self, qualities):
initial_qualities, upgrade_qualities = Quality.splitQuality(qualities)
initial_qualities, upgrade_qualities = Quality.split_quality(qualities)
max_initial_quality = max(initial_qualities or [Quality.NONE])
min_upgrade_quality = min(upgrade_qualities or [1 << 16])
self[qualities] = {0: {self.bothlists: False, self.wantedlist: initial_qualities, self.upgradelist: False}}
@ -563,23 +562,23 @@ for (attr_name, qual_val) in [
('SNATCHED', SNATCHED), ('SNATCHED_PROPER', SNATCHED_PROPER), ('SNATCHED_BEST', SNATCHED_BEST),
('DOWNLOADED', DOWNLOADED), ('ARCHIVED', ARCHIVED), ('FAILED', FAILED),
]:
setattr(Quality, attr_name, map_list(lambda qk: Quality.compositeStatus(qual_val, qk),
iterkeys(Quality.qualityStrings)))
setattr(Quality, attr_name, list(map(lambda qk: Quality.composite_status(qual_val, qk),
iterkeys(Quality.qualityStrings))))
Quality.SNATCHED_ANY = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], [])
HD = Quality.combineQualities(
SD = Quality.combine_qualities([Quality.SDTV, Quality.SDDVD], [])
HD = Quality.combine_qualities(
[Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY],
[]) # HD720p + HD1080p
HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
UHD2160p = Quality.combineQualities([Quality.UHD4KWEB], [])
ANY = Quality.combineQualities(
HD720p = Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
HD1080p = Quality.combine_qualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
UHD2160p = Quality.combine_qualities([Quality.UHD4KWEB], [])
ANY = Quality.combine_qualities(
[Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD
# legacy template, can't remove due to reference in mainDB upgrade?
BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
BEST = Quality.combine_qualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
qualityPresets = (SD, HD, HD720p, HD1080p, UHD2160p, ANY)
@ -608,7 +607,7 @@ class StatusStrings(object):
def __getitem__(self, name):
if name in Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.ARCHIVED:
status, quality = Quality.splitCompositeStatus(name)
status, quality = Quality.split_composite_status(name)
if quality == Quality.NONE:
return self.statusStrings[status]
return '%s (%s)' % (self.statusStrings[status], Quality.qualityStrings[quality])
@ -704,7 +703,7 @@ class NeededQualities(object):
"""
from sickgear.tv import TVShow
if isinstance(show_obj, TVShow):
init, upgrade = Quality.splitQuality(show_obj.quality)
init, upgrade = Quality.split_quality(show_obj.quality)
all_qual = set(init + upgrade)
need_sd = need_hd = need_uhd = need_webdl = False
for wanted_qualities in all_qual:

View file

@ -23,7 +23,7 @@ import sickgear.providers
from . import db, helpers, logger, naming
from lib.api_trakt import TraktAPI
from _23 import filter_list, urlsplit, urlunsplit
from _23 import urlsplit, urlunsplit
from six import string_types
@ -152,7 +152,7 @@ def schedule_mediaprocess(iv):
if sickgear.MEDIAPROCESS_INTERVAL < sickgear.MIN_MEDIAPROCESS_INTERVAL:
sickgear.MEDIAPROCESS_INTERVAL = sickgear.MIN_MEDIAPROCESS_INTERVAL
sickgear.media_process_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL)
sickgear.media_process_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL)
sickgear.media_process_scheduler.set_paused_state()
@ -162,14 +162,14 @@ def schedule_recentsearch(iv):
if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL:
sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL
sickgear.recent_search_scheduler.cycleTime = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL)
sickgear.recent_search_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL)
def schedule_backlog(iv):
sickgear.BACKLOG_PERIOD = minimax(iv, sickgear.DEFAULT_BACKLOG_PERIOD,
sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD)
sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD)
sickgear.backlog_search_scheduler.action.cycleTime = sickgear.BACKLOG_PERIOD
sickgear.backlog_search_scheduler.action.cycle_time = sickgear.BACKLOG_PERIOD
def schedule_update_software(iv):
@ -178,7 +178,7 @@ def schedule_update_software(iv):
if sickgear.UPDATE_INTERVAL < sickgear.MIN_UPDATE_INTERVAL:
sickgear.UPDATE_INTERVAL = sickgear.MIN_UPDATE_INTERVAL
sickgear.update_software_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL)
sickgear.update_software_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_INTERVAL)
def schedule_update_software_notify(update_notify):
@ -195,10 +195,10 @@ def schedule_update_software_notify(update_notify):
def schedule_update_packages(iv):
sickgear.UPDATE_PACKAGES_INTERVAL = minimax(iv, sickgear.DEFAULT_UPDATE_PACKAGES_INTERVAL,
sickgear.MIN_UPDATE_PACKAGES_INTERVAL,
sickgear.MAX_UPDATE_PACKAGES_INTERVAL)
sickgear.MIN_UPDATE_PACKAGES_INTERVAL,
sickgear.MAX_UPDATE_PACKAGES_INTERVAL)
sickgear.update_packages_scheduler.cycleTime = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL)
sickgear.update_packages_scheduler.cycle_time = datetime.timedelta(hours=sickgear.UPDATE_PACKAGES_INTERVAL)
def schedule_update_packages_notify(update_packages_notify):
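Several schedulers above clamp their interval through `minimax()` before assigning `cycle_time`. The helper's body is not in this diff; a hedged sketch of the assumed semantics (fall back to the default on unusable input, otherwise bound to [min, max]):

def minimax(val, default, low, high):
    # assumed behaviour, for illustration only
    try:
        val = int(val)
    except (TypeError, ValueError):
        return default
    return max(low, min(high, val))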
@ -228,15 +228,6 @@ def schedule_trakt(use_trakt):
return
sickgear.USE_TRAKT = use_trakt
# if sickgear.USE_TRAKT:
# sickgear.trakt_checker_scheduler.start()
# else:
# sickgear.trakt_checker_scheduler.stop()
# logger.log(u'Waiting for the TRAKTCHECKER thread to exit')
# try:
# sickgear.trakt_checker_scheduler.join(10)
# except:
# pass
def schedule_subtitles(use_subtitles):
@ -250,7 +241,7 @@ def schedule_emby_watched(emby_watched_interval):
0, sickgear.MAX_WATCHEDSTATE_INTERVAL)
if emby_watched_iv and emby_watched_iv != sickgear.EMBY_WATCHEDSTATE_INTERVAL:
sickgear.EMBY_WATCHEDSTATE_INTERVAL = emby_watched_iv
sickgear.emby_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=emby_watched_iv)
sickgear.emby_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=emby_watched_iv)
sickgear.EMBY_WATCHEDSTATE_SCHEDULED = bool(emby_watched_iv)
sickgear.emby_watched_state_scheduler.set_paused_state()
@ -261,7 +252,7 @@ def schedule_plex_watched(plex_watched_interval):
0, sickgear.MAX_WATCHEDSTATE_INTERVAL)
if plex_watched_iv and plex_watched_iv != sickgear.PLEX_WATCHEDSTATE_INTERVAL:
sickgear.PLEX_WATCHEDSTATE_INTERVAL = plex_watched_iv
sickgear.plex_watched_state_scheduler.cycleTime = datetime.timedelta(minutes=plex_watched_iv)
sickgear.plex_watched_state_scheduler.cycle_time = datetime.timedelta(minutes=plex_watched_iv)
sickgear.PLEX_WATCHEDSTATE_SCHEDULED = bool(plex_watched_iv)
sickgear.plex_watched_state_scheduler.set_paused_state()
@ -345,7 +336,7 @@ def clean_hosts(hosts, default_port=None, allow_base=False):
def clean_url(url, add_slash=True):
""" Returns an cleaned url starting with a scheme and folder with trailing '/' or an empty string """
""" Returns a cleaned url starting with a scheme and folder with trailing '/' or an empty string """
if url and url.strip():
@ -437,7 +428,7 @@ def check_setting_float(config, cfg_name, item_name, def_val):
def check_setting_str(config, cfg_name, item_name, def_val, log=True):
"""
For passwords you must include the word `password` in the item_name and
For passwords, you must include the word `password` in the item_name and
add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
"""
@ -662,7 +653,7 @@ class ConfigMigrator(object):
Reads in the old naming settings from your config and generates a new config template from them.
"""
# get the old settings from the file and store them in the new variable names
for prov in [curProvider for curProvider in sickgear.providers.sortedProviderList()
for prov in [curProvider for curProvider in sickgear.providers.sorted_sources()
if 'omgwtfnzbs' == curProvider.name]:
prov.username = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '')
prov.api_key = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '')
@ -773,13 +764,13 @@ class ConfigMigrator(object):
# Migration v6: Rename daily search to recent search
def _migrate_v6(self):
sickgear.RECENTSEARCH_INTERVAL = check_setting_int(self.config_obj, 'General', 'dailysearch_frequency',
sickgear.DEFAULT_RECENTSEARCH_INTERVAL)
sickgear.DEFAULT_RECENTSEARCH_INTERVAL)
sickgear.RECENTSEARCH_STARTUP = bool(check_setting_int(self.config_obj, 'General', 'dailysearch_startup', 1))
if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL:
sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL
for curProvider in sickgear.providers.sortedProviderList():
for curProvider in sickgear.providers.sorted_sources():
if hasattr(curProvider, 'enable_recentsearch'):
curProvider.enable_recentsearch = bool(check_setting_int(
self.config_obj, curProvider.get_id().upper(), curProvider.get_id() + '_enable_dailysearch', 1))
@ -831,7 +822,7 @@ class ConfigMigrator(object):
# Migration v15: Transmithe.net variables
def _migrate_v15(self):
try:
neb = filter_list(lambda p: 'Nebulance' in p.name, sickgear.providers.sortedProviderList())[0]
neb = list(filter(lambda p: 'Nebulance' in p.name, sickgear.providers.sorted_sources()))[0]
except (BaseException, Exception):
return
# get the old settings from the file and store them in the new variable names

View file

@ -96,16 +96,16 @@ class InitialSchema(db.SchemaUpgrade):
])
def test(self):
return self.hasTable('lastUpdate')
return self.has_table('lastUpdate')
def execute(self):
self.do_query(self.queries[next(iter(self.queries))])
self.setDBVersion(MIN_DB_VERSION, check_db_version=False)
self.set_db_version(MIN_DB_VERSION, check_db_version=False)
class ConsolidateProviders(InitialSchema):
def test(self):
return 1 < self.checkDBVersion()
return 1 < self.call_check_db_version()
def execute(self):
keep_tables = {'lastUpdate', 'lastSearch', 'db_version',
@ -113,13 +113,13 @@ class ConsolidateProviders(InitialSchema):
# old provider_cache is dropped before re-creation
# noinspection SqlResolve
self.do_query(['DROP TABLE [provider_cache]'] + self.queries['consolidate_providers'] +
['DROP TABLE [%s]' % t for t in (set(self.listTables()) - keep_tables)])
['DROP TABLE [%s]' % t for t in (set(self.list_tables()) - keep_tables)])
self.finish(True)
class AddBacklogParts(ConsolidateProviders):
def test(self):
return 2 < self.checkDBVersion()
return 2 < self.call_check_db_version()
def execute(self):
# noinspection SqlResolve
@ -130,7 +130,7 @@ class AddBacklogParts(ConsolidateProviders):
class AddProviderFailureHandling(AddBacklogParts):
def test(self):
return 3 < self.checkDBVersion()
return 3 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['add_provider_fails'])
@ -139,17 +139,17 @@ class AddProviderFailureHandling(AddBacklogParts):
class AddIndexerToTables(AddProviderFailureHandling):
def test(self):
return 4 < self.checkDBVersion()
return 4 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['add_indexer_to_tables'])
self.addColumn('provider_cache', 'indexer', 'NUMERIC')
self.add_column('provider_cache', 'indexer', 'NUMERIC')
self.finish()
class AddGenericFailureHandling(AddBacklogParts):
def test(self):
return 5 < self.checkDBVersion()
return 5 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['connection_fails'])
@ -158,7 +158,7 @@ class AddGenericFailureHandling(AddBacklogParts):
class AddSaveQueues(AddGenericFailureHandling):
def test(self):
return 6 < self.checkDBVersion()
return 6 < self.call_check_db_version()
def execute(self):
self.do_query(self.queries['save_queues'])

View file

@ -28,7 +28,7 @@ TEST_BASE_VERSION = None # the base production db version, only needed for TEST
# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
def test(self):
return self.hasTable('failed')
return self.has_table('failed')
def execute(self):
queries = [
@ -45,18 +45,18 @@ class InitialSchema(db.SchemaUpgrade):
class SizeAndProvider(InitialSchema):
def test(self):
return self.hasColumn('failed', 'size') and self.hasColumn('failed', 'provider')
return self.has_column('failed', 'size') and self.has_column('failed', 'provider')
def execute(self):
self.addColumn('failed', 'size')
self.addColumn('failed', 'provider', 'TEXT', '')
self.add_column('failed', 'size')
self.add_column('failed', 'provider', 'TEXT', '')
class History(SizeAndProvider):
"""Snatch history that can't be modified by the user"""
def test(self):
return self.hasTable('history')
return self.has_table('history')
def execute(self):
self.connection.action('CREATE TABLE history (date NUMERIC, ' +
@ -67,21 +67,21 @@ class HistoryStatus(History):
"""Store episode status before snatch to revert to if necessary"""
def test(self):
return self.hasColumn('history', 'old_status')
return self.has_column('history', 'old_status')
def execute(self):
self.addColumn('history', 'old_status', 'NUMERIC', Quality.NONE)
self.addColumn('history', 'showid', 'NUMERIC', '-1')
self.addColumn('history', 'season', 'NUMERIC', '-1')
self.addColumn('history', 'episode', 'NUMERIC', '-1')
self.add_column('history', 'old_status', 'NUMERIC', Quality.NONE)
self.add_column('history', 'showid', 'NUMERIC', '-1')
self.add_column('history', 'season', 'NUMERIC', '-1')
self.add_column('history', 'episode', 'NUMERIC', '-1')
class AddIndexerToTables(HistoryStatus):
def test(self):
return self.hasColumn('history', 'indexer')
return self.has_column('history', 'indexer')
def execute(self):
self.addColumn('history', 'indexer', 'NUMERIC')
self.add_column('history', 'indexer', 'NUMERIC')
main_db = db.DBConnection('sickbeard.db')
show_ids = {s['prod_id']: s['tv_id'] for s in
@ -91,15 +91,15 @@ class AddIndexerToTables(HistoryStatus):
cl.append(['UPDATE history SET indexer = ? WHERE showid = ?', [i, s_id]])
self.connection.mass_action(cl)
if self.connection.hasTable('backup_history'):
if self.connection.has_table('backup_history'):
self.connection.action(
'REPLACE INTO history '
'(date, size, `release`, provider, old_status, showid, season, episode, indexer)'
' SELECT'
' date, size, `release`, provider, old_status, showid, season, episode, indexer'
' FROM backup_history')
self.connection.removeTable('backup_history')
self.connection.remove_table('backup_history')
self.connection.action('VACUUM')
self.setDBVersion(2, check_db_version=False)
self.set_db_version(2, check_db_version=False)

File diff suppressed because it is too large

View file

@ -32,11 +32,12 @@ from .sgdatetime import timestamp_near
from sg_helpers import make_path, compress_file, remove_file_perm, scantree
from _23 import filter_iter, filter_list, list_values, scandir
from _23 import scandir
from six import iterkeys, iteritems, itervalues
# noinspection PyUnreachableCode
if False:
# noinspection PyUnresolvedReferences
from typing import Any, AnyStr, Dict, List, Optional, Tuple, Union
@ -47,7 +48,7 @@ db_support_upsert = (3, 25, 0) <= sqlite3.sqlite_version_info # type: bool
db_supports_backup = hasattr(sqlite3.Connection, 'backup') and (3, 6, 11) <= sqlite3.sqlite_version_info # type: bool
def dbFilename(filename='sickbeard.db', suffix=None):
def db_filename(filename='sickbeard.db', suffix=None):
# type: (AnyStr, Optional[AnyStr]) -> AnyStr
"""
@param filename: The sqlite database filename to use. If not specified,
@ -70,7 +71,7 @@ def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True):
:param value_dict: dict of values to be set {'table_fieldname': value}
:param key_dict: dict of restrains for update {'table_fieldname': value}
:param sanitise: True to remove k, v pairs in keyDict from valueDict as they must not exist in both.
This option has a performance hit so it's best to remove key_dict keys from value_dict and set this False instead.
This option has a performance hit, so it's best to remove key_dict keys from value_dict and set this False instead.
:type sanitise: Boolean
:return: list of 2 sql command
"""
@ -80,12 +81,12 @@ def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True):
# sanity: remove k, v pairs in keyDict from valueDict
if sanitise:
value_dict = dict(filter_iter(lambda k: k[0] not in key_dict, iteritems(value_dict)))
value_dict = dict(filter(lambda k: k[0] not in key_dict, iteritems(value_dict)))
# noinspection SqlResolve
cl.append(['UPDATE [%s] SET %s WHERE %s' %
(table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))),
list_values(value_dict) + list_values(key_dict)])
list(value_dict.values()) + list(key_dict.values())])
# noinspection SqlResolve
cl.append(['INSERT INTO [' + table_name + '] (' +
@ -104,9 +105,9 @@ class DBConnection(object):
from . import helpers
self.new_db = False
db_src = dbFilename(filename)
db_src = db_filename(filename)
if not os.path.isfile(db_src):
db_alt = dbFilename('sickrage.db')
db_alt = db_filename('sickrage.db')
if os.path.isfile(db_alt):
helpers.copy_file(db_alt, db_src)
@ -143,6 +144,7 @@ class DBConnection(object):
logger.log('Backup target file already exists', logger.ERROR)
return False, 'Backup target file already exists'
# noinspection PyUnusedLocal
def progress(status, remaining, total):
logger.log('Copied %s of %s pages...' % (total - remaining, total), logger.DEBUG)
@ -167,11 +169,11 @@ class DBConnection(object):
return True, 'Backup successful'
def checkDBVersion(self):
def check_db_version(self):
# type: (...) -> int
try:
if self.hasTable('db_version'):
if self.has_table('db_version'):
result = self.select('SELECT db_version FROM db_version')
else:
version = self.select('PRAGMA user_version')[0]['user_version']
@ -185,7 +187,7 @@ class DBConnection(object):
if result:
version = int(result[0]['db_version'])
if 10000 > version and self.hasColumn('db_version', 'db_minor_version'):
if 10000 > version and self.has_column('db_version', 'db_minor_version'):
# noinspection SqlResolve
minor = self.select('SELECT db_minor_version FROM db_version')
return version * 100 + int(minor[0]['db_minor_version'])
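The version scheme above folds major and minor into one integer: a stored version below 10000 with a recorded minor becomes `major * 100 + minor`. A worked example:

version = 20                       # db_version column
minor = 4                          # db_minor_version column
combined = version * 100 + minor   # check_db_version() -> 2004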
@ -304,16 +306,16 @@ class DBConnection(object):
query = 'UPDATE [%s] SET %s WHERE %s' % (
table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict)))
self.action(query, list_values(value_dict) + list_values(key_dict))
self.action(query, list(value_dict.values()) + list(key_dict.values()))
if self.connection.total_changes == changes_before:
# noinspection SqlResolve
query = 'INSERT INTO [' + table_name + ']' \
+ ' (%s)' % ', '.join(itertools.chain(iterkeys(value_dict), iterkeys(key_dict))) \
+ ' VALUES (%s)' % ', '.join(['?'] * (len(value_dict) + len(key_dict)))
self.action(query, list_values(value_dict) + list_values(key_dict))
self.action(query, list(value_dict.values()) + list(key_dict.values()))
def tableInfo(self, table_name):
def table_info(self, table_name):
# type: (AnyStr) -> Dict[AnyStr, Dict[AnyStr, AnyStr]]
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
@ -331,38 +333,32 @@ class DBConnection(object):
d[col[0]] = row[idx]
return d
def hasTable(self, table_name):
def has_table(self, table_name):
# type: (AnyStr) -> bool
return 0 < len(self.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,)))
def hasColumn(self, table_name, column):
def has_column(self, table_name, column):
# type: (AnyStr, AnyStr) -> bool
return column in self.tableInfo(table_name)
return column in self.table_info(table_name)
def hasIndex(self, table_name, index):
def has_index(self, table_name, index):
# type: (AnyStr, AnyStr) -> bool
sqlResults = self.select('PRAGMA index_list([%s])' % table_name)
for result in sqlResults:
sql_results = self.select('PRAGMA index_list([%s])' % table_name)
for result in sql_results:
if result['name'] == index:
return True
return False
def removeIndex(self, table, name):
def remove_index(self, table, name):
# type: (AnyStr, AnyStr) -> None
if self.hasIndex(table, name):
if self.has_index(table, name):
self.action('DROP INDEX' + ' [%s]' % name)
def removeTable(self, name):
def remove_table(self, name):
# type: (AnyStr) -> None
if self.hasTable(name):
if self.has_table(name):
self.action('DROP TABLE' + ' [%s]' % name)
# noinspection SqlResolve
def addColumn(self, table, column, data_type='NUMERIC', default=0):
# type: (AnyStr, AnyStr, AnyStr, Any) -> None
self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, data_type))
self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))
def has_flag(self, flag_name):
# type: (AnyStr) -> bool
sql_result = self.select('SELECT flag FROM flags WHERE flag = ?', [flag_name])
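The renamed helpers above all lean on SQLite introspection; a self-contained sketch of the same three queries using only the stdlib (schema illustrative):

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE history (id INTEGER, provider TEXT)')
con.execute('CREATE INDEX idx_provider ON history (provider)')

# has_table: look the name up in sqlite_master
print(0 < len(con.execute('SELECT 1 FROM sqlite_master WHERE name = ?', ('history',)).fetchall()))
# has_column: PRAGMA table_info yields one row per column, name in field 1
print('provider' in [r[1] for r in con.execute('PRAGMA table_info([history])')])
# has_index: PRAGMA index_list yields one row per index, name in field 1
print('idx_provider' in [r[1] for r in con.execute('PRAGMA index_list([history])')])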
@ -415,7 +411,7 @@ class DBConnection(object):
logger.load_log('Upgrading %s' % self.filename, to_log, log_level)
def sanityCheckDatabase(connection, sanity_check):
def sanity_check_db(connection, sanity_check):
sanity_check(connection).check()
@ -427,36 +423,36 @@ class DBSanityCheck(object):
pass
def upgradeDatabase(connection, schema):
def upgrade_database(connection, schema):
logger.log(u'Checking database structure...', logger.MESSAGE)
connection.is_upgrading = False
connection.new_db = 0 == connection.checkDBVersion()
_processUpgrade(connection, schema)
connection.new_db = 0 == connection.check_db_version()
_process_upgrade(connection, schema)
if connection.is_upgrading:
connection.upgrade_log('Finished')
def prettyName(class_name):
def _pretty_name(class_name):
# type: (AnyStr) -> AnyStr
return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)])
def restoreDatabase(filename, version):
def _restore_database(filename, version):
logger.log(u'Restoring database before trying upgrade again')
if not sickgear.helpers.restore_versioned_file(dbFilename(filename=filename, suffix='v%s' % version), version):
if not sickgear.helpers.restore_versioned_file(db_filename(filename=filename, suffix='v%s' % version), version):
logger.log_error_and_exit(u'Database restore failed, abort upgrading database')
return False
return True
def _processUpgrade(connection, upgrade_class):
def _process_upgrade(connection, upgrade_class):
instance = upgrade_class(connection)
logger.log('Checking %s database upgrade' % prettyName(upgrade_class.__name__), logger.DEBUG)
logger.log('Checking %s database upgrade' % _pretty_name(upgrade_class.__name__), logger.DEBUG)
if not instance.test():
connection.is_upgrading = True
connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or prettyName(upgrade_class.__name__))
logger.log('Database upgrade required: %s' % prettyName(upgrade_class.__name__), logger.MESSAGE)
db_version = connection.checkDBVersion()
connection.upgrade_log(getattr(upgrade_class, 'pretty_name', None) or _pretty_name(upgrade_class.__name__))
logger.log('Database upgrade required: %s' % _pretty_name(upgrade_class.__name__), logger.MESSAGE)
db_version = connection.check_db_version()
try:
# only do backup if it's not a new db
0 < db_version and backup_database(connection, connection.filename, db_version)
@ -468,7 +464,7 @@ def _processUpgrade(connection, upgrade_class):
# close db before attempting restore
connection.close()
if restoreDatabase(connection.filename, db_version):
if _restore_database(connection.filename, db_version):
logger.log_error_and_exit('Successfully restored database version: %s' % db_version)
else:
logger.log_error_and_exit('Failed to restore database version: %s' % db_version)
@ -480,7 +476,7 @@ def _processUpgrade(connection, upgrade_class):
logger.log('%s upgrade not required' % upgrade_class.__name__, logger.DEBUG)
for upgradeSubClass in upgrade_class.__subclasses__():
_processUpgrade(connection, upgradeSubClass)
_process_upgrade(connection, upgradeSubClass)
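The renamed _process_upgrade checks one upgrade class, then recurses into its subclasses, so the whole chain runs in declaration order; a toy version of that traversal (classes illustrative):

class SchemaStep(object):
    def apply(self):
        print('applying %s' % self.__class__.__name__)

class AddSizeField(SchemaStep):
    pass

class AddSceneField(AddSizeField):
    pass

def process(step_cls):
    # depth-first: run this step, then every direct subclass in turn
    step_cls().apply()
    for sub_cls in step_cls.__subclasses__():
        process(sub_cls)

process(SchemaStep)  # applies SchemaStep, AddSizeField, AddSceneField in order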
# Base migration class. All future DB changes should be subclassed from this class
@ -488,11 +484,11 @@ class SchemaUpgrade(object):
def __init__(self, connection, **kwargs):
self.connection = connection
def hasTable(self, table_name):
def has_table(self, table_name):
return 0 < len(self.connection.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,)))
def hasColumn(self, table_name, column):
return column in self.connection.tableInfo(table_name)
def has_column(self, table_name, column):
return column in self.connection.table_info(table_name)
def list_tables(self):
# type: (...) -> List[AnyStr]
@ -511,13 +507,13 @@ class SchemaUpgrade(object):
['index'])]
# noinspection SqlResolve
def addColumn(self, table, column, data_type='NUMERIC', default=0, set_default=False):
def add_column(self, table, column, data_type='NUMERIC', default=0, set_default=False):
self.connection.action('ALTER TABLE [%s] ADD %s %s%s' %
(table, column, data_type, ('', ' DEFAULT "%s"' % default)[set_default]))
self.connection.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))
# noinspection SqlResolve
def addColumns(self, table, column_list=None):
def add_columns(self, table, column_list=None):
# type: (AnyStr, List) -> None
if isinstance(column_list, list):
sql = []
@ -535,25 +531,21 @@ class SchemaUpgrade(object):
if sql:
self.connection.mass_action(sql)
def dropColumn(self, table, columns):
# type: (AnyStr, AnyStr) -> None
self.drop_columns(table, columns)
def drop_columns(self, table, column):
# type: (AnyStr, Union[AnyStr, List[AnyStr]]) -> None
# get old table columns and store the ones we want to keep
result = self.connection.select('pragma table_info([%s])' % table)
columns_list = ([column], column)[isinstance(column, list)]
keptColumns = filter_list(lambda col: col['name'] not in columns_list, result)
kept_columns = list(filter(lambda col: col['name'] not in columns_list, result))
keptColumnsNames = []
kept_columns_names = []
final = []
pk = []
# copy the old table schema, column by column
for column in keptColumns:
for column in kept_columns:
keptColumnsNames.append(column['name'])
kept_columns_names.append(column['name'])
cl = [column['name'], column['type']]
@ -574,7 +566,7 @@ class SchemaUpgrade(object):
# join all the table column creation fields
final = ', '.join(final)
keptColumnsNames = ', '.join(keptColumnsNames)
kept_columns_names = ', '.join(kept_columns_names)
# generate sql for the new table creation
if 0 == len(pk):
@ -586,12 +578,12 @@ class SchemaUpgrade(object):
# create new temporary table and copy the old table data across, barring the removed column
self.connection.action(sql)
# noinspection SqlResolve
self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, keptColumnsNames, table))
self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, kept_columns_names, table))
# copy the old indexes from the old table
result = self.connection.select("SELECT sql FROM sqlite_master WHERE tbl_name=? AND type='index'", [table])
# remove the old table and rename the new table to take it's place
# remove the old table and rename the new table to take its place
# noinspection SqlResolve
self.connection.action('DROP TABLE [%s]' % table)
# noinspection SqlResolve
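Older SQLite has no DROP COLUMN, hence the rebuild above: create a <table>_new without the dropped columns, copy the kept data across, drop the old table and rename. A compact sketch of that dance; unlike the real code, CREATE TABLE ... AS drops constraints and indexes, so this only shows the shape of the idea (schema illustrative):

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE eps (id INTEGER PRIMARY KEY, name TEXT, legacy TEXT)')
con.execute("INSERT INTO eps VALUES (1, 'pilot', 'junk')")

kept = [r[1] for r in con.execute('PRAGMA table_info([eps])') if 'legacy' != r[1]]
con.execute('CREATE TABLE eps_new AS SELECT %s FROM eps' % ', '.join(kept))
con.execute('DROP TABLE eps')
con.execute('ALTER TABLE eps_new RENAME TO eps')
print([r[1] for r in con.execute('PRAGMA table_info([eps])')])  # ['id', 'name']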
@ -605,22 +597,19 @@ class SchemaUpgrade(object):
# vacuum the db as we will have a lot of space to reclaim after dropping tables
self.connection.action('VACUUM')
def checkDBVersion(self):
return self.connection.checkDBVersion()
def call_check_db_version(self):
return self.connection.check_db_version()
def incDBVersion(self):
new_version = self.checkDBVersion() + 1
def inc_db_version(self):
new_version = self.call_check_db_version() + 1
# noinspection SqlConstantCondition
self.connection.action('UPDATE db_version SET db_version = ? WHERE 1=1', [new_version])
return new_version
def setDBVersion(self, new_version, check_db_version=True):
def set_db_version(self, new_version, check_db_version=True):
# noinspection SqlConstantCondition
self.connection.action('UPDATE db_version SET db_version = ? WHERE 1=1', [new_version])
return check_db_version and self.checkDBVersion()
def listTables(self):
return self.list_tables()
return check_db_version and self.call_check_db_version()
def do_query(self, queries):
if not isinstance(queries, list):
@ -630,23 +619,23 @@ class SchemaUpgrade(object):
for query in queries:
tbl_name = re.findall(r'(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
if tbl_name and not self.hasTable(tbl_name[0]):
if tbl_name and not self.has_table(tbl_name[0]):
continue
tbl_name = re.findall(r'(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
if tbl_name and self.hasTable(tbl_name[0]):
if tbl_name and self.has_table(tbl_name[0]):
continue
self.connection.action(query)
def finish(self, tbl_dropped=False):
if tbl_dropped:
self.connection.action('VACUUM')
self.incDBVersion()
self.inc_db_version()
def upgrade_log(self, *args, **kwargs):
self.connection.upgrade_log(*args, **kwargs)
def MigrationCode(my_db):
def migration_code(my_db):
schema = {
0: sickgear.mainDB.InitialSchema,
9: sickgear.mainDB.AddSizeAndSceneNameFields,
@ -719,7 +708,7 @@ def MigrationCode(my_db):
# 20002: sickgear.mainDB.AddCoolSickGearFeature3,
}
db_version = my_db.checkDBVersion()
db_version = my_db.check_db_version()
my_db.new_db = 0 == db_version
logger.log(u'Detected database version: v%s' % db_version, logger.DEBUG)
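The schema dict keys a starting db_version to the class that upgrades past it; a loose sketch (not the exact control flow) of how such a version-keyed map can drive stepwise upgrades:

schema = {0: 'InitialSchema', 9: 'AddSizeAndSceneNameFields', 10: 'RenameSeasonFolders'}

db_version = 9
while db_version in schema:
    print('running %s' % schema[db_version])  # stand-in for instantiating the upgrade class
    db_version += 1  # each applied upgrade bumps the stored version
print('now at v%s' % db_version)  # now at v11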
@ -746,7 +735,7 @@ def MigrationCode(my_db):
my_db.close()
logger.log(u'Failed to update database with error: %s attempting recovery...' % ex(e), logger.ERROR)
if restoreDatabase(my_db.filename, db_version):
if _restore_database(my_db.filename, db_version):
# initialize the main SB database
logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version)
else:
@ -759,9 +748,9 @@ def cleanup_old_db_backups(filename):
d, filename = os.path.split(filename)
if not d:
d = sickgear.DATA_DIR
for f in filter_iter(lambda fn: fn.is_file() and filename in fn.name and
re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name),
scandir(d)):
for f in filter(lambda fn: fn.is_file() and filename in fn.name and
re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name),
scandir(d)):
try:
os.unlink(f.path)
except (BaseException, Exception):
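The filter above targets rolled backup names such as sickbeard.db.v20.r3; a quick check of that pattern against sample names (names illustrative):

import re

pattern = re.compile(r'\.db(\.v\d+)?\.r\d+$')
for name in ('sickbeard.db.v20.r3', 'sickbeard.db.r1', 'sickbeard.db', 'cache.db.v5'):
    print(name, bool(pattern.search(name)))
# the first two match (versioned and unversioned rolls); the plain db files do not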
@ -777,7 +766,7 @@ def backup_database(db_connection, filename, version):
return
logger.log(u'Backing up database before upgrade')
if not sickgear.helpers.backup_versioned_file(dbFilename(filename), version):
if not sickgear.helpers.backup_versioned_file(db_filename(filename), version):
logger.log_error_and_exit(u'Database backup failed, abort upgrading database')
else:
logger.log(u'Proceeding with upgrade')
@ -841,7 +830,7 @@ def backup_all_dbs(target, compress=True, prefer_7z=True):
optional compress with zip or 7z (python 3 only, external lib py7zr required)
7z falls back to zip if py7zr is not available
:param target: target folder to backup to
:param target: target folder for the db backup
:param compress: compress db backups
:param prefer_7z: prefer 7z compression if available
:return: success, message
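A hedged sketch of the compress-with-fallback idea the docstring describes: prefer py7zr when it imports, otherwise fall back to the stdlib zipfile (paths illustrative, not the actual implementation):

import zipfile

def compress_backup(src_path, archive_base, prefer_7z=True):
    if prefer_7z:
        try:
            import py7zr  # optional external lib, absent on many installs
            with py7zr.SevenZipFile(archive_base + '.7z', 'w') as z_file:
                z_file.write(src_path)
            return archive_base + '.7z'
        except ImportError:
            pass  # fall through to zip
    with zipfile.ZipFile(archive_base + '.zip', 'w', zipfile.ZIP_DEFLATED) as z_file:
        z_file.write(src_path)
    return archive_base + '.zip'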

View file

@ -33,7 +33,7 @@ class Events(threading.Thread):
# get event type
etype = self.queue.get(True, 1)
# perform callback if we got a event type
# perform callback if we got an event type
self.callback(etype)
# event completed

View file

@ -69,19 +69,19 @@ class FailedProcessor(LegacyFailedProcessor):
"""
self._log(u'Failed download detected: (%s, %s)' % (self.nzb_name, self.dir_name))
releaseName = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
if None is releaseName:
release_name = show_name_helpers.determine_release_name(self.dir_name, self.nzb_name)
if None is release_name:
self._log(u'Warning: unable to find a valid release name.', logger.WARNING)
raise exceptions_helper.FailedProcessingFailed()
try:
parser = NameParser(False, show_obj=self.show_obj, convert=True)
parsed = parser.parse(releaseName)
parsed = parser.parse(release_name)
except InvalidNameException:
self._log(u'Error: release name is invalid: ' + releaseName, logger.DEBUG)
self._log(u'Error: release name is invalid: ' + release_name, logger.DEBUG)
raise exceptions_helper.FailedProcessingFailed()
except InvalidShowException:
self._log(u'Error: unable to parse release name %s into a valid show' % releaseName, logger.DEBUG)
self._log(u'Error: unable to parse release name %s into a valid show' % release_name, logger.DEBUG)
raise exceptions_helper.FailedProcessingFailed()
logger.log(u"name_parser info: ", logger.DEBUG)

View file

@ -25,7 +25,6 @@ from .history import dateFormat
from exceptions_helper import EpisodeNotFoundException, ex
from _23 import unquote
from six import PY2, text_type
# noinspection PyUnresolvedReferences
# noinspection PyUnreachableCode
@ -83,10 +82,6 @@ def prepare_failed_name(release):
fixed = re.sub(r'[.\-+ ]', '_', fixed)
# noinspection PyUnresolvedReferences
if PY2 and not isinstance(fixed, unicode):
fixed = text_type(fixed, 'utf-8', 'replace')
return fixed
@ -165,8 +160,8 @@ def set_episode_failed(ep_obj):
"""
try:
with ep_obj.lock:
quality = Quality.splitCompositeStatus(ep_obj.status)[1]
ep_obj.status = Quality.compositeStatus(FAILED, quality)
quality = Quality.split_composite_status(ep_obj.status)[1]
ep_obj.status = Quality.composite_status(FAILED, quality)
ep_obj.save_to_db()
except EpisodeNotFoundException as e:
@ -236,7 +231,7 @@ def revert_episode(ep_obj):
if ep_obj.episode in history_eps:
status_revert = history_eps[ep_obj.episode]['old_status']
status, quality = Quality.splitCompositeStatus(status_revert)
status, quality = Quality.split_composite_status(status_revert)
logger.log('Found in failed.db history with status: %s quality: %s' % (
statusStrings[status], Quality.qualityStrings[quality]))
else:

View file

@ -175,7 +175,7 @@ class GenericQueue(object):
"""
clear queue excluding internal defined types
:param action_types: only clear all of given action type
:param action_types: only clear supplied action types
"""
if not isinstance(action_types, list):
action_types = [action_types]

View file

@ -23,7 +23,7 @@ if False:
class GitHub(object):
"""
Simple api wrapper for the Github API v3. Currently only supports the small thing that SB
Simple api wrapper for the GitHub API v3. Currently only supports the small thing that SB
needs it for - list of commits.
"""
@ -34,7 +34,7 @@ class GitHub(object):
self.branch = branch
@staticmethod
def _access_API(path, params=None):
def _access_api(path, params=None):
"""
Access the API at the path given and with the optional params given.
@ -49,55 +49,57 @@ class GitHub(object):
if params and type(params) is dict:
url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params])
parsedJSON = helpers.get_url(url, parse_json=True)
if not parsedJSON:
parsed_json = helpers.get_url(url, parse_json=True)
if not parsed_json:
return []
return parsedJSON
return parsed_json
def commits(self):
"""
Get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD.
user: The github username of the person whose repo you're querying
user: The GitHub username of the person whose repo you're querying
repo: The repo name to query
branch: Optional, the branch name to show commits from
Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
Returns a deserialized json object containing the commit info.
See https://developer.github.com/v3/repos/commits/
"""
access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'],
access_api = self._access_api(['repos', self.github_repo_user, self.github_repo, 'commits'],
params={'per_page': 100, 'sha': self.branch})
return access_API
return access_api
def compare(self, base, head, per_page=1):
"""
Uses the API to get a list of compares between base and head.
user: The github username of the person whose repo you're querying
user: The GitHub username of the person whose repo you're querying
repo: The repo name to query
base: Start compare from branch
head: Current commit sha or branch name to compare
per_page: number of items per page
Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits
Returns a deserialized json object containing the compare info.
See https://developer.github.com/v3/repos/commits
"""
access_API = self._access_API(
access_api = self._access_api(
['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head],
params={'per_page': per_page})
return access_API
return access_api
def branches(self):
access_API = self._access_API(
access_api = self._access_api(
['repos', self.github_repo_user, self.github_repo, 'branches'],
params={'per_page': 100})
return access_API
return access_api
def pull_requests(self):
access_API = self._access_API(
access_api = self._access_api(
['repos', self.github_repo_user, self.github_repo, 'pulls'],
params={'per_page': 100}) # type: Optional[Dict]
pulls = []
for x in access_API:
for x in access_api:
try:
pull = PullRequest(x['head']['ref'], x['number'])
pulls.append((repr(pull), pull.fetch_name()))
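The wrapper simply joins path segments onto the API root and appends a query string; a standalone equivalent, assuming the standard https://api.github.com root (repo values illustrative):

def build_api_url(path, params=None):
    # join path parts onto the GitHub v3 root, then add a query string
    url = 'https://api.github.com/' + '/'.join(str(p) for p in path)
    if params and isinstance(params, dict):
        url += '?' + '&'.join('%s=%s' % (k, v) for k, v in params.items())
    return url

print(build_api_url(['repos', 'SickGear', 'SickGear', 'commits'], params={'per_page': 100, 'sha': 'main'}))
# https://api.github.com/repos/SickGear/SickGear/commits?per_page=100&sha=main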

View file

@ -43,8 +43,9 @@ import requests
import requests.exceptions
import subliminal
from lxml_etree import etree, is_lxml
from base64 import decodebytes as b64decodebytes, encodebytes as b64encodebytes
from _23 import b64decodebytes, b64encodebytes, decode_bytes, decode_str, filter_iter, scandir
from _23 import decode_bytes, decode_str, scandir
from six import iteritems, string_types, text_type
# noinspection PyUnresolvedReferences
from six.moves import zip
@ -62,7 +63,7 @@ if False:
from typing import Any, AnyStr, Dict, Generator, NoReturn, Iterable, Iterator, List, Optional, Set, Tuple, Union
from .tv import TVShow
# the following workaround hack resolves a pyc resolution bug
from .name_cache import retrieveNameFromCache
from .name_cache import retrieve_name_from_cache
from six import integer_types
RE_XML_ENCODING = re.compile(r'^(<\?xml[^>]+)\s+(encoding\s*=\s*[\"\'][^\"\']*[\"\'])(\s*\?>|)', re.U)
@ -953,7 +954,7 @@ def get_show(name, try_scene_exceptions=False):
show_obj = None
try:
tvid, prodid = sickgear.name_cache.retrieveNameFromCache(name)
tvid, prodid = sickgear.name_cache.retrieve_name_from_cache(name)
if tvid and prodid:
show_obj = find_show_by_id({tvid: prodid})
@ -1283,7 +1284,7 @@ def check_port(host, port, timeout=1.0):
def clear_unused_providers():
providers = [x.cache.providerID for x in sickgear.providers.sortedProviderList() if x.is_active()]
providers = [x.cache.providerID for x in sickgear.providers.sorted_sources() if x.is_active()]
if providers:
my_db = db.DBConnection('cache.db')
@ -1317,7 +1318,7 @@ def has_anime():
:rtype: bool
"""
# noinspection PyTypeChecker
return False if not sickgear.showList else any(filter_iter(lambda show: show.is_anime, sickgear.showList))
return False if not sickgear.showList else any(filter(lambda show: show.is_anime, sickgear.showList))
def cpu_sleep():
@ -1390,7 +1391,7 @@ def should_delete_episode(status):
:return: should be deleted
:rtype: bool
"""
s = Quality.splitCompositeStatus(status)[0]
s = Quality.split_composite_status(status)[0]
if s not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED]:
return True
logger.log('not safe to delete episode from db because of status: %s' % statusStrings[s], logger.DEBUG)
@ -1514,7 +1515,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False):
:type split_snatch: bool
:return: constant from classes Overview
"""
status, quality = Quality.splitCompositeStatus(ep_status)
status, quality = Quality.split_composite_status(ep_status)
if ARCHIVED == status:
return Overview.GOOD
if WANTED == status:
@ -1530,7 +1531,7 @@ def get_overview(ep_status, show_quality, upgrade_once, split_snatch=False):
if not split_snatch and status in SNATCHED_ANY:
return Overview.SNATCHED
void, best_qualities = Quality.splitQuality(show_quality)
void, best_qualities = Quality.split_quality(show_quality)
# if re-downloads aren't wanted then mark it "good" if there is anything
if not len(best_qualities):
return Overview.GOOD
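In this code lineage a composite status packs the quality into the hundreds place (status + 100 * quality), which is what split_composite_status undoes; a round-trip sketch under that assumption, with made-up constant values:

def composite_status(status, quality):
    # quality rides in the hundreds, status stays in the low two digits
    return status + 100 * quality

def split_composite_status(composite):
    return composite % 100, composite // 100  # -> (status, quality)

DOWNLOADED, HDTV = 4, 8  # illustrative values, not the real constants
assert (DOWNLOADED, HDTV) == split_composite_status(composite_status(DOWNLOADED, HDTV))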
@ -1682,7 +1683,7 @@ def upgrade_new_naming():
(d_entry.path, new_dir_name, repr(e), ex(e)), logger.WARNING)
if os.path.isdir(new_dir_name):
try:
f_n = filter_iter(lambda fn: fn.is_file(), scandir(new_dir_name))
f_n = filter(lambda fn: fn.is_file(), scandir(new_dir_name))
except OSError as e:
logger.log('Unable to rename %s / %s' % (repr(e), ex(e)),
logger.WARNING)

View file

@ -22,8 +22,6 @@ from .common import FAILED, SNATCHED, SNATCHED_PROPER, SUBTITLED, Quality
from .name_parser.parser import NameParser
import sickgear
from six import PY2, text_type
# noinspection PyUnreachableCode
if False:
from typing import Any, AnyStr
@ -47,9 +45,6 @@ def _log_history_item(action, tvid, prodid, season, episode, quality, resource,
"""
log_date = datetime.datetime.now().strftime(dateFormat)
if PY2 and not isinstance(resource, text_type):
resource = text_type(resource, 'utf-8', 'replace')
my_db = db.DBConnection()
my_db.action(
'INSERT INTO history'
@ -77,7 +72,7 @@ def log_snatch(search_result):
else:
provider = 'unknown'
action = Quality.compositeStatus((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality)
action = Quality.composite_status((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality)
resource = search_result.name
@ -125,8 +120,8 @@ def log_subtitle(tvid, prodid, season, episode, status, subtitle_result):
"""
resource = subtitle_result.path
provider = subtitle_result.service
status, quality = Quality.splitCompositeStatus(status)
action = Quality.compositeStatus(SUBTITLED, quality)
status, quality = Quality.split_composite_status(status)
action = Quality.composite_status(SUBTITLED, quality)
_log_history_item(action, tvid, prodid, season, episode, quality, resource, provider)
@ -140,8 +135,8 @@ def log_failed(ep_obj, release, provider=None):
:param release: release
:param provider: provider name
"""
status, quality = Quality.splitCompositeStatus(ep_obj.status)
action = Quality.compositeStatus(FAILED, quality)
status, quality = Quality.split_composite_status(ep_obj.status)
action = Quality.composite_status(FAILED, quality)
_log_history_item(action, ep_obj.show_obj.tvid, ep_obj.show_obj.prodid,
ep_obj.season, ep_obj.episode, quality, release, provider)
@ -215,7 +210,7 @@ def history_snatched_proper_fix():
continue
if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
[Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])),
[Quality.composite_status(SNATCHED_PROPER, int(r['quality'])),
r['rowid']]])
if cl:
my_db.mass_action(cl)

View file

@ -271,7 +271,7 @@ class ImageCache(object):
"""
:param image_file: image file
:type image_file: AnyStr
:return: true if a image_file exists
:return: true if an image_file exists
:rtype: bool
"""
result = []
@ -652,7 +652,7 @@ class ImageCache(object):
if thumb_img_data:
thumb_result = metadata_generator.write_image(thumb_img_data, dest_thumb_path, force=True)
if not thumb_result:
thumb_result = metadata_generator.write_image(img_data, dest_thumb_path, force=True)
metadata_generator.write_image(img_data, dest_thumb_path, force=True)
break
if result:

View file

@ -26,8 +26,7 @@ import sickgear
from lib.dateutil.parser import parse
from _23 import unidecode
from six import iteritems, moves, string_types, PY2
from six import iteritems, moves, string_types
# noinspection PyUnreachableCode
if False:
@ -133,7 +132,7 @@ def confirm_show(premiere_date, shows_premiere, expected_name, show_name):
# type: (Optional[datetime.date], Optional[Union[AnyStr, datetime.date]], AnyStr, AnyStr) -> bool
"""
confirm show possible confirmations:
1. premiere dates are less then 2 days apart
1. premiere dates are less than 2 days apart
2. show name is the same and premiere year is 1 year or less apart
:param premiere_date: expected show premiere date
@ -178,9 +177,7 @@ def clean_show_name(showname):
:return:
:rtype: AnyStr
"""
if not PY2:
return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname)
return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', unidecode(showname))
return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname)
def get_show_name_date(show_obj):
@ -255,7 +252,7 @@ def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_
all_ids_srcs = [src_tv_id] + [s for s in (TVINFO_TRAKT, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB, TVINFO_IMDB)
if s != src_tv_id]
searched, confirmed = {}, False
for r in moves.range(len(all_ids_srcs)):
for _ in moves.range(len(all_ids_srcs)):
search_done = False
for i in all_ids_srcs:
if new_ids.verified.get(i):

View file

@ -20,8 +20,6 @@ from sg_helpers import proxy_setting
import sickgear
from lib.tvinfo_base import TVInfoBase
from _23 import list_values
# noinspection PyUnreachableCode
if False:
from typing import AnyStr, Dict
@ -83,13 +81,13 @@ class TVInfoAPI(object):
@property
def sources(self):
# type: () -> Dict[int, AnyStr]
return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and
return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and
True is not x.get('fallback') and True is not x.get('people_only')])
@property
def search_sources(self):
# type: () -> Dict[int, AnyStr]
return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and
return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and
x.get('active') and not x.get('defunct') and True is not x.get('fallback')
and True is not x.get('people_only')])
@ -99,7 +97,7 @@ class TVInfoAPI(object):
"""
:return: return all indexers including mapped only indexers excluding fallback indexers
"""
return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is not x.get('fallback')
return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is not x.get('fallback')
and True is not x.get('people_only')])
@property
@ -108,9 +106,9 @@ class TVInfoAPI(object):
"""
:return: return all fallback indexers
"""
return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is x.get('fallback')])
return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is x.get('fallback')])
@property
def xem_supported_sources(self):
# type: () -> Dict[int, AnyStr]
return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if x.get('xem_origin')])
return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if x.get('xem_origin')])

View file

@ -263,8 +263,8 @@ class SBRotatingLogHandler(object):
buf = fh.read(min(remaining_size, buf_size))
remaining_size -= buf_size
lines = buf.split('\n')
# the first line of the buffer is probably not a complete line so
# we'll save it and append it to the last line of the next buffer
# the first line of the buffer is probably not a complete line,
# so save it and append it to the last line of the next buffer
# we read
if None is not segment:
# if the previous chunk starts right from the beginning of line
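The handler reads the log backwards in fixed-size chunks, carrying each buffer's (probably partial) first line into the next read; a compact sketch of that backward line reader (buffer size illustrative):

import io, os

def read_lines_backwards(path, buf_size=4096):
    lines, segment = [], None
    with io.open(path, 'rb') as fh:
        fh.seek(0, os.SEEK_END)
        remaining = fh.tell()
        while 0 < remaining:
            read_size = min(remaining, buf_size)
            remaining -= read_size
            fh.seek(remaining)
            chunk = fh.read(read_size).split(b'\n')
            if None is not segment:
                chunk[-1] += segment  # rejoin the partial line carried from the later chunk
            segment = chunk[0]  # may still be incomplete, carry it into the next read
            lines[:0] = chunk[1:]
    if None is not segment:
        lines.insert(0, segment)
    return [cur_line.decode('utf-8', 'replace') for cur_line in lines]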

View file

@ -19,14 +19,13 @@ __all__ = ['generic', 'helpers', 'kodi', 'mede8er', 'mediabrowser', 'ps3', 'tivo
import sys
from . import kodi, mede8er, mediabrowser, ps3, tivo, wdtv, xbmc, xbmc_12plus
from _23 import filter_list
def available_generators():
return filter_list(lambda x: x not in ('generic', 'helpers'), __all__)
return list(filter(lambda x: x not in ('generic', 'helpers'), __all__))
def _getMetadataModule(name):
def _get_metadata_module(name):
name = name.lower()
prefix = "sickgear.metadata."
if name in __all__ and prefix + name in sys.modules:
@ -34,8 +33,8 @@ def _getMetadataModule(name):
return None
def _getMetadataClass(name):
module = _getMetadataModule(name)
def _get_metadata_class(name):
module = _get_metadata_module(name)
if not module:
return None
@ -46,10 +45,10 @@ def _getMetadataClass(name):
def get_metadata_generator_dict():
result = {}
for cur_generator_id in available_generators():
cur_generator = _getMetadataClass(cur_generator_id)
cur_generator = _get_metadata_class(cur_generator_id)
if not cur_generator:
continue
result[cur_generator.name] = cur_generator
return result

View file

@ -35,7 +35,6 @@ from lib.fanart.core import Request as fanartRequest
import lib.fanart as fanart
from lxml_etree import etree
from _23 import filter_iter, list_keys
from six import iteritems, itervalues, string_types
# noinspection PyUnreachableCode
@ -614,7 +613,7 @@ class GenericMetadata(object):
logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG)
return False
thumb_data = metadata_helpers.getShowImage(thumb_url, show_name=ep_obj.show_obj.name)
thumb_data = metadata_helpers.get_show_image(thumb_url, show_name=ep_obj.show_obj.name)
result = self._write_image(thumb_data, file_path)
@ -712,7 +711,7 @@ class GenericMetadata(object):
if 0 == len(cur_season_art):
continue
# Just grab whatever's there for now
# Just grab whatever is there for now
art_id, season_url = cur_season_art.popitem()
season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)
@ -722,7 +721,7 @@ class GenericMetadata(object):
logger.DEBUG)
continue
season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season poster data available, skipping this season', logger.DEBUG)
@ -757,7 +756,7 @@ class GenericMetadata(object):
if 0 == len(cur_season_art):
continue
# Just grab whatever's there for now
# Just grab whatever is there for now
art_id, season_url = cur_season_art.popitem()
season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)
@ -767,7 +766,7 @@ class GenericMetadata(object):
logger.DEBUG)
continue
season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
season_data = metadata_helpers.get_show_image(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season banner data available, skipping this season', logger.DEBUG)
@ -855,7 +854,7 @@ class GenericMetadata(object):
def _get_show_info(tv_id):
try:
show_lang = show_obj.lang
# There's gotta be a better way of doing this but we don't wanna
# There's gotta be a better way of doing this, but we don't want to
# change the language value elsewhere
tvinfo_config = sickgear.TVInfoAPI(tv_id).api_params.copy()
tvinfo_config['fanart'] = True
@ -874,7 +873,7 @@ class GenericMetadata(object):
tv_id).name + ", not downloading images: " + ex(e), logger.WARNING)
# todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB
for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list_keys(sickgear.TVInfoAPI().search_sources) +
for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list(sickgear.TVInfoAPI().search_sources) +
[TVINFO_TMDB])):
if tv_src != show_obj.tvid and not show_obj.ids.get(tv_src, {}).get('id'):
continue
@ -1059,7 +1058,7 @@ class GenericMetadata(object):
if image_type in ('poster', 'banner'):
if isinstance(image_url, tuple):
image_url = image_url[0]
img_data = metadata_helpers.getShowImage(image_url, which, show_obj.name)
img_data = metadata_helpers.get_show_image(image_url, which, show_obj.name)
if img_cache_type and img_cache_type != image_cache.which_type(img_data, is_binary=True):
img_data = None
continue
@ -1083,7 +1082,7 @@ class GenericMetadata(object):
result = {}
try:
# There's gotta be a better way of doing this but we don't wanna
# There's gotta be a better way of doing this, but we don't want to
# change the language value elsewhere
tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy()
tvinfo_config[image_type] = True
@ -1220,9 +1219,9 @@ class GenericMetadata(object):
resp = request.response()
itemlist = []
dedupe = []
for art in filter_iter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]),
# remove "[0:2]" ... to strictly use only data where "en" is at source
resp[types[image_type]]): # type: dict
for art in filter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]),
# remove "[0:2]" ... to strictly use only data where "en" is at source
resp[types[image_type]]): # type: dict
try:
url = (art['url'], art['url'].replace('/fanart/', '/preview/'))[thumb]
if url not in dedupe:

View file

@ -22,7 +22,7 @@ if False:
from typing import AnyStr, Optional
def getShowImage(url, img_num=None, show_name=None, supress_log=False):
def get_show_image(url, img_num=None, show_name=None, supress_log=False):
# type: (AnyStr, Optional[int], Optional[AnyStr], bool) -> Optional[bytes]
"""

View file

@ -29,7 +29,7 @@ import exceptions_helper
from exceptions_helper import ex
from lxml_etree import etree
from _23 import decode_str, map_iter
from _23 import decode_str
from six import string_types
# noinspection PyUnreachableCode
@ -107,7 +107,7 @@ class KODIMetadata(generic.GenericMetadata):
show_obj: a TVShow instance to create the NFO for
"""
show_ID = show_obj.prodid
show_id = show_obj.prodid
show_lang = show_obj.lang
tvinfo_config = sickgear.TVInfoAPI(show_obj.tvid).api_params.copy()
@ -125,9 +125,9 @@ class KODIMetadata(generic.GenericMetadata):
tv_node = etree.Element('tvshow')
try:
show_info = t[int(show_ID)]
show_info = t[int(show_id)]
except BaseTVinfoShownotfound as e:
logger.log('Unable to find show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
logger.log('Unable to find show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
show_obj.tvid).name), logger.ERROR)
raise e
except BaseTVinfoError as e:
@ -141,7 +141,7 @@ class KODIMetadata(generic.GenericMetadata):
# check for title and id
if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None):
logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
show_obj.tvid).name), logger.ERROR)
return False
@ -157,7 +157,7 @@ class KODIMetadata(generic.GenericMetadata):
has_id = False
tvdb_id = None
for tvid, slug in map_iter(
for tvid, slug in map(
lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config.get('kodi_slug')),
list(sickgear.TVInfoAPI().all_sources)):
mid = slug and show_obj.ids[tvid].get('id')
@ -171,7 +171,7 @@ class KODIMetadata(generic.GenericMetadata):
uniqueid = etree.SubElement(tv_node, 'uniqueid', **kwargs)
uniqueid.text = '%s%s' % (('', 'tt')[TVINFO_IMDB == tvid], mid)
if not has_id:
logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_ID, sickgear.TVInfoAPI(
logger.log('Incomplete info for show with id %s on %s, skipping it' % (show_id, sickgear.TVInfoAPI(
show_obj.tvid).name), logger.ERROR)
return False

View file

@ -32,7 +32,7 @@ sceneNameCache = {}
nameCacheLock = threading.Lock()
def addNameToCache(name, tvid=0, prodid=0, season=-1):
def add_name_to_cache(name, tvid=0, prodid=0, season=-1):
"""Adds the show & tvdb id to the namecache
:param name: the show name to cache
@ -41,7 +41,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1):
:type tvid: int
:param prodid: the production id that this show should be cached with (can be None/0 for unknown)
:type prodid: int or long
:param season: the season the the name exception belongs to. -1 for generic exception
:param season: the season the name exception belongs to. -1 for generic exception
:type season: int
"""
global nameCache
@ -53,7 +53,7 @@ def addNameToCache(name, tvid=0, prodid=0, season=-1):
nameCache[name] = [int(tvid), int(prodid), season]
def retrieveNameFromCache(name):
def retrieve_name_from_cache(name):
# type: (AnyStr) -> Union[Tuple[int, int], Tuple[None, None]]
"""Looks up the given name in the name cache
@ -71,7 +71,7 @@ def retrieveNameFromCache(name):
return None, None
def buildNameCache(show_obj=None, update_only_scene=False):
def build_name_cache(show_obj=None, update_only_scene=False):
# type: (Optional[Union[TVShow, TVShowBase]], bool) -> None
"""Adds all new name exceptions to the namecache memory and flushes any removed name exceptions
@ -104,7 +104,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
for cur_so in sickgear.showList if cur_so])
sceneNameCache = {}
cacheDB = db.DBConnection()
cache_db = db.DBConnection()
cache_results = []
if update_only_scene:
@ -117,7 +117,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
tmp_scene_name_cache = sceneNameCache.copy()
for t, s in iteritems(show_ids):
cache_results += cacheDB.select(
cache_results += cache_db.select(
'SELECT show_name, indexer AS tv_id, indexer_id AS prod_id, season'
' FROM scene_exceptions'
' WHERE indexer = %s AND indexer_id IN (%s)' % (t, ','.join(['%s' % i for i in s])))
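The cache built here is essentially a dict keyed by show name (sanitised; plain lower-casing stands in for that below) with [tvid, prodid, season] values; a toy add/retrieve pair (data illustrative):

name_cache = {}

def add_name(name, tvid=0, prodid=0, season=-1):
    # season -1 marks a generic, non season-specific exception
    name_cache[name.lower()] = [int(tvid), int(prodid), season]

def retrieve_name(name):
    if name.lower() in name_cache:
        tvid, prodid, _ = name_cache[name.lower()]
        return tvid, prodid
    return None, None

add_name('Show Name 2010', tvid=1, prodid=12345)
print(retrieve_name('show name 2010'))  # (1, 12345)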

View file

@ -39,8 +39,8 @@ from lib.tvinfo_base.exceptions import *
from ..classes import OrderedDefaultdict
from .._legacy_classes import LegacyParseResult
from _23 import decode_str, list_keys, list_range
from six import iteritems, iterkeys, itervalues, PY2, string_types, text_type
from _23 import decode_str, list_range
from six import iteritems, iterkeys, itervalues, string_types, text_type
# noinspection PyUnreachableCode
if False:
@ -166,7 +166,7 @@ class NameParser(object):
result.which_regex = [cur_regex_name]
result.score = 0 - cur_regex_num
named_groups = list_keys(match.groupdict())
named_groups = list(match.groupdict())
if 'series_name' in named_groups:
result.series_name = match.group('series_name')
@ -260,7 +260,7 @@ class NameParser(object):
if 'extra_info' in named_groups:
tmp_extra_info = match.group('extra_info')
# Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
# Show.S04.Special or Show.S05.Part.2.Extras are almost certainly not every episode in the season
if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
continue
@ -292,7 +292,7 @@ class NameParser(object):
matches.append(result)
if len(matches):
# pick best match with highest score based on placement
# pick best match with the highest score based on placement
best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)
show_obj = None
@ -326,7 +326,7 @@ class NameParser(object):
# get quality
new_name = helpers.remove_non_release_groups(name, show_obj.is_anime)
best_result.quality = common.Quality.nameQuality(new_name, show_obj.is_anime)
best_result.quality = common.Quality.name_quality(new_name, show_obj.is_anime)
new_episode_numbers = []
new_season_numbers = []
@ -451,7 +451,7 @@ class NameParser(object):
'SickGear does not support this. '
'Sorry.' % (str(new_season_numbers)))
# I guess it's possible that we'd have duplicate episodes too, so lets
# I guess it's possible that we'd have duplicate episodes too, so let's
# eliminate them
new_episode_numbers = list(set(new_episode_numbers))
new_episode_numbers.sort()
@ -500,23 +500,20 @@ class NameParser(object):
if not second:
return getattr(first, attr)
a = getattr(first, attr, [])
b = getattr(second, attr)
first_val = getattr(first, attr, [])
second_val = getattr(second, attr)
# if a is good use it
if None is not a or (isinstance(a, list) and len(a)):
return a
# if first_val is good use it
if None is not first_val or (isinstance(first_val, list) and len(first_val)):
return first_val
# if not use b (if b isn't set it'll just be default)
return b
return second_val
@staticmethod
def _unicodify(obj, encoding='utf-8'):
if PY2 and isinstance(obj, string_types):
if not isinstance(obj, text_type):
obj = text_type(obj, encoding, 'replace')
if not PY2 and isinstance(obj, text_type):
def _unicodify(obj, encoding='utf8'):
if isinstance(obj, text_type):
try:
return obj.encode('latin1').decode('utf8')
return obj.encode('latin1').decode(encoding)
except (BaseException, Exception):
pass
return obj
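The new _unicodify body is the classic mojibake repair: text that was wrongly decoded as latin-1 is re-encoded to its original bytes and decoded as UTF-8, and left untouched when that round trip fails:

def unicodify(obj, encoding='utf8'):
    if isinstance(obj, str):
        try:
            return obj.encode('latin1').decode(encoding)
        except (UnicodeEncodeError, UnicodeDecodeError):
            pass  # not mojibake (or not latin-1 representable), keep as-is
    return obj

print(unicodify('Caf\xc3\xa9'))  # 'Café' - double-decoded UTF-8, repaired
print(unicodify('Café'))         # 'Café' - already clean, round trip fails, left alone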
@ -751,9 +748,7 @@ class ParseResult(LegacyParseResult):
self.release_group, self.air_date, tuple(self.ab_episode_numbers)))
def __str__(self):
if not PY2:
return self.__unicode__()
return self.__unicode__().encode('utf-8', errors='ignore')
return self.__unicode__()
def __unicode__(self):
if None is not self.series_name:

View file

@ -14,7 +14,7 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
# all regexes are case insensitive
# all regexes are case-insensitive
normal_regexes = [
('garbage_name',

View file

@ -109,7 +109,7 @@ class TVEpisodeSample(tv.TVEpisode):
self.scene_absolute_number = absolute_number # type: int
self._airdate = datetime.date(2010, 3, 9) # type: datetime.date
self.show_obj = TVShowSample() # type: TVShowSample
self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV) # type: int
self._status = Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV) # type: int
self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP' # type: AnyStr
self._is_proper = True # type: bool
self._version = 2 # type: int
@ -196,7 +196,7 @@ def check_valid_abd_naming(pattern=None):
def check_valid_sports_naming(pattern=None):
"""
Checks if the name is can be parsed back to its original form for an sports format.
Checks if the name can be parsed back to its original form for a sports format.
Returns true if the naming is valid, false if not.
:param pattern: String Naming Pattern
@ -294,7 +294,7 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t
# make a fake episode object
sample_ep_obj = TVEpisodeSample(2, 3, 3, 'Ep Name')
sample_ep_obj._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
sample_ep_obj._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
sample_ep_obj._airdate = datetime.date(2011, 3, 9)
if abd:
@ -313,14 +313,14 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_t
if None is not multi:
sample_ep_obj._name = 'Ep Name (1)'
second_ep = TVEpisodeSample(2, 4, 4, 'Ep Name (2)')
second_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
second_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
normal_naming = not anime or 3 == anime_type
release_name = sample_ep_obj._release_name = second_ep._release_name = \
('Show.Name.003-004.HDTV.XviD-RLSGROUP', 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP')[normal_naming]
sample_ep_obj.related_ep_obj.append(second_ep)
if normal_naming:
third_ep = TVEpisodeSample(2, 5, 5, 'Ep Name (3)')
third_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
third_ep._status = Quality.composite_status(DOWNLOADED, Quality.HDTV)
third_ep._release_name = release_name
sample_ep_obj.related_ep_obj.append(third_ep)
else:

View file

@ -29,15 +29,14 @@ from lib.dateutil import tz, zoneinfo
from lib.tzlocal import get_localzone
from sg_helpers import remove_file_perm, scantree
from six import integer_types, iteritems, string_types, PY2
from _23 import list_keys
from six import integer_types, iteritems, string_types
# noinspection PyUnreachableCode
if False:
from _23 import DirEntry
from typing import AnyStr, Optional, Tuple, Union
# regex to parse time (12/24 hour format)
# regex to parse time (12/24-hour format)
time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)
am_regex = re.compile(r'(A[. ]? ?M)', flags=re.I)
pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I)
@ -175,7 +174,7 @@ def _update_zoneinfo():
url_data = helpers.get_url(url)
if None is url_data:
update_last_retry()
# when None is urlData, trouble connecting to github
# when None is urlData, trouble connecting to GitHub
logger.log(u'Fetching zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url,
logger.WARNING)
return
@ -264,13 +263,13 @@ def update_network_dict():
network_tz_data = {}
# network timezones are stored on github pages
# network timezones are stored on GitHub pages
url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt'
url_data = helpers.get_url(url)
if url_data in (None, ''):
update_last_retry()
# When None is urlData, trouble connecting to github
# When None is urlData, trouble connecting to GitHub
logger.debug(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url)
load_network_dict(load=False)
return
@ -414,7 +413,7 @@ def parse_time(time_of_day):
hour = helpers.try_int(time_parsed.group(1))
mins = helpers.try_int(time_parsed.group(4))
ampm = time_parsed.group(5)
# convert am/pm to 24 hour clock
# convert am/pm to 24-hour clock
if None is not ampm:
if None is not pm_regex.search(ampm) and 12 != hour:
hour += 12
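A self-contained version of the 12-to-24-hour conversion happening here, covering the usual edge cases (12 AM maps to 0, 12 PM stays 12); the function shape is illustrative:

def to_24_hour(hour, ampm):
    # ampm: 'am', 'pm' or None when the input is already 24-hour
    if ampm:
        if 'pm' == ampm.lower() and 12 != hour:
            hour += 12
        elif 'am' == ampm.lower() and 12 == hour:
            hour = 0
    return hour

assert 0 == to_24_hour(12, 'am')
assert 12 == to_24_hour(12, 'pm')
assert 21 == to_24_hour(9, 'pm')
assert 17 == to_24_hour(17, None)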
@ -506,13 +505,13 @@ def _load_network_conversions():
conversions_in = []
# network conversions are stored on github pages
# network conversions are stored on GitHub pages
url = 'https://raw.githubusercontent.com/prinz23/sg_network_conversions/master/conversions.txt'
url_data = helpers.get_url(url)
if url_data in (None, ''):
update_last_retry()
# when no url_data, trouble connecting to github
# when no url_data, trouble connecting to GitHub
logger.debug(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url)
return
@ -547,7 +546,7 @@ def _load_network_conversions():
# remove deleted records
if 0 < len(conversions_db):
network_name = list_keys(conversions_db)
network_name = list(conversions_db)
cl.append(['DELETE FROM network_conversions WHERE tvdb_network'
' IN (%s)' % ','.join(['?'] * len(network_name)), network_name])
@ -632,8 +631,6 @@ def get_episode_time(d, # type: int
if d and None is not ep_time and None is not tzinfo:
ep_date = datetime.date.fromordinal(helpers.try_int(d))
if PY2:
return datetime.datetime.combine(ep_date, ep_time).replace(tzinfo=tzinfo)
return datetime.datetime.combine(ep_date, ep_time, tzinfo)
return parse_date_time(d, t, tzinfo)

View file

@ -25,8 +25,6 @@ from . import emby, kodi, plex, xbmc, \
import sickgear
from _23 import filter_iter, list_values
class NotifierFactory(object):
@ -68,32 +66,27 @@ class NotifierFactory(object):
:return: ID String
:rtype: String
"""
for n in filter_iter(lambda v: v.is_enabled(),
list_values(self.notifiers)):
for n in filter(lambda v: v.is_enabled(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_onsnatch(self):
for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(),
list_values(self.notifiers)):
for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_ondownload(self):
for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_ondownload(),
list_values(self.notifiers)):
for n in filter(lambda v: v.is_enabled() and v.is_enabled_ondownload(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_onsubtitledownload(self):
for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(),
list_values(self.notifiers)):
for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(), list(self.notifiers.values())):
yield n.id()
@property
def enabled_library(self):
for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_library(),
list_values(self.notifiers)):
for n in filter(lambda v: v.is_enabled() and v.is_enabled_library(), list(self.notifiers.values())):
yield n.id()
def get(self, nid):

View file

@ -21,7 +21,7 @@ from .generic import Notifier
from json_helper import json_loads
import sickgear
from _23 import decode_bytes, decode_str, map_list
from _23 import decode_bytes, decode_str
class EmbyNotifier(Notifier):
@ -50,7 +50,7 @@ class EmbyNotifier(Notifier):
timeout=20, hooks=dict(response=self._cb_response), json=True)
return self.response and self.response.get('ok') and 200 == self.response.get('status_code') and \
version <= map_list(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.'))
version <= list(map(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.')))
def update_library(self, show_obj=None, **kwargs):
""" Update library function

View file

@ -20,8 +20,8 @@ from .generic import Notifier
import sickgear
from exceptions_helper import ex
from _23 import b64encodestring, decode_str, etree, filter_iter, list_values, unquote_plus, urlencode
from six import iteritems, text_type, PY2
from _23 import b64encodestring, decode_str, etree, unquote_plus, urlencode
from six import iteritems
# noinspection PyUnresolvedReferences
from six.moves import urllib
@ -49,8 +49,7 @@ class PLEXNotifier(Notifier):
return False
for key in command:
if not PY2 or type(command[key]) == text_type:
command[key] = command[key].encode('utf-8')
command[key] = command[key].encode('utf-8')
enc_command = urlencode(command)
self._log_debug(u'Encoded API command: ' + enc_command)
@ -203,7 +202,7 @@ class PLEXNotifier(Notifier):
hosts_failed.append(cur_host)
continue
for section in filter_iter(lambda x: 'show' == x.attrib['type'], sections):
for section in filter(lambda x: 'show' == x.attrib['type'], sections):
if str(section.attrib['key']) in hosts_all:
continue
keyed_host = [(str(section.attrib['key']), cur_host)]
@ -247,18 +246,14 @@ class PLEXNotifier(Notifier):
return ''
hosts = [
host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'),
list_values(hosts_all))]
host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), list(hosts_all.values()))]
secured = [
host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'),
list_values(hosts_all))]
host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), list(hosts_all.values()))]
failed = ', '.join([
host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'),
hosts_failed)])
failed_secured = ', '.join(filter_iter(
host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), hosts_failed)])
failed_secured = ', '.join(filter(
lambda x: x not in hosts,
[host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'),
hosts_failed)]))
[host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), hosts_failed)]))
return '<br>' + '<br>'.join([result for result in [
('', 'Fail: username/password when fetching credentials from plex.tv')[False is token_arg],

View file

@ -22,7 +22,6 @@ import sickgear
from lib.api_trakt import TraktAPI, exceptions
from exceptions_helper import ConnectionSkipException
from _23 import list_keys
from six import iteritems
# noinspection PyUnreachableCode
@ -38,7 +37,7 @@ class TraktNotifier(BaseNotifier):
def is_enabled_library(cls):
if sickgear.TRAKT_ACCOUNTS:
for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION):
if tid in list_keys(sickgear.TRAKT_ACCOUNTS):
if tid in list(sickgear.TRAKT_ACCOUNTS):
return True
return False
@ -89,7 +88,7 @@ class TraktNotifier(BaseNotifier):
data['shows'][0]['seasons'][0]['episodes'].append({'number': cur_ep_obj.episode})
for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION):
if tid not in list_keys(sickgear.TRAKT_ACCOUNTS):
if tid not in list(sickgear.TRAKT_ACCOUNTS):
continue
for loc in locations:
if not ep_obj.location.startswith('%s%s' % (loc.rstrip(os.path.sep), os.path.sep)):

View file

@ -23,7 +23,6 @@ from exceptions_helper import ex
from json_helper import json_dumps, json_load
from _23 import b64encodestring, decode_str, etree, quote, unquote, unquote_plus, urlencode
from six import PY2, text_type
# noinspection PyUnresolvedReferences
from six.moves import urllib
@ -150,8 +149,7 @@ class XBMCNotifier(Notifier):
password = self._choose(password, sickgear.XBMC_PASSWORD)
for key in command:
if not PY2 or type(command[key]) == text_type:
command[key] = command[key].encode('utf-8')
command[key] = command[key].encode('utf-8')
enc_command = urlencode(command)
self._log_debug(u'Encoded API command: ' + enc_command)

View file

@ -40,7 +40,7 @@ SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"')
RE_NORMAL_NAME = re.compile(r'\.\w{1,5}$')
def platform_encode(p):
def _platform_encode(p):
""" Return Unicode name, if not already Unicode, decode with UTF-8 or latin1 """
try:
return decode_str(p)
@ -48,17 +48,17 @@ def platform_encode(p):
return decode_str(p, sickgear.SYS_ENCODING, errors='replace').replace('?', '!')
def name_extractor(subject):
def _name_extractor(subject):
""" Try to extract a file name from a subject line, return `subject` if in doubt """
result = subject
for name in re.findall(SUBJECT_FN_MATCHER, subject):
name = name.strip(' "')
if name and RE_NORMAL_NAME.search(name):
result = name
return platform_encode(result)
return _platform_encode(result)
def getSeasonNZBs(name, url_data, season):
def _get_season_nzbs(name, url_data, season):
"""
:param name: name
@ -71,31 +71,31 @@ def getSeasonNZBs(name, url_data, season):
:rtype: Tuple[Dict, AnyStr]
"""
try:
showXML = etree.ElementTree(etree.XML(url_data))
show_xml = etree.ElementTree(etree.XML(url_data))
except SyntaxError:
logger.log(u'Unable to parse the XML of %s, not splitting it' % name, logger.ERROR)
return {}, ''
filename = name.replace('.nzb', '')
nzbElement = showXML.getroot()
nzb_element = show_xml.getroot()
regex = r'([\w\._\ ]+)[\._ ]S%02d[\._ ]([\w\._\-\ ]+)' % season
sceneNameMatch = re.search(regex, filename, re.I)
if sceneNameMatch:
showName, qualitySection = sceneNameMatch.groups()
scene_name_match = re.search(regex, filename, re.I)
if scene_name_match:
show_name, quality_section = scene_name_match.groups()
else:
logger.log('%s - Not a valid season pack scene name. If it\'s a valid one, log a bug.' % name, logger.ERROR)
return {}, ''
regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(showName), season)
regex = r'(%s[\._]S%02d(?:[E0-9]+)\.[\w\._]+)' % (re.escape(show_name), season)
regex = regex.replace(' ', '.')
ep_files = {}
xmlns = None
for cur_file in list(nzbElement):
for cur_file in list(nzb_element):
if not isinstance(cur_file.tag, string_types):
continue
xmlns_match = re.match(r'[{](https?://[A-Za-z0-9_./]+/nzb)[}]file', cur_file.tag)
@ -108,7 +108,7 @@ def getSeasonNZBs(name, url_data, season):
# print curFile.get("subject"), "doesn't match", regex
continue
cur_ep = match.group(1)
fn = name_extractor(cur_file.get('subject', ''))
fn = _name_extractor(cur_file.get('subject', ''))
if cur_ep == re.sub(r'\+\d+\.par2$', '', fn, flags=re.I):
bn, ext = os.path.splitext(fn)
cur_ep = re.sub(r'\.(part\d+|vol\d+(\+\d+)?)$', '', bn, flags=re.I)
@ -126,7 +126,7 @@ def getSeasonNZBs(name, url_data, season):
return ep_files, xmlns
def createNZBString(file_elements, xmlns):
def _create_nzb_string(file_elements, xmlns):
"""
:param file_elements: first element
@ -134,17 +134,17 @@ def createNZBString(file_elements, xmlns):
:return:
:rtype: AnyStr
"""
rootElement = etree.Element("nzb")
root_element = etree.Element("nzb")
if xmlns:
rootElement.set("xmlns", xmlns)
root_element.set("xmlns", xmlns)
for curFile in file_elements:
rootElement.append(stripNS(curFile, xmlns))
root_element.append(_strip_ns(curFile, xmlns))
return etree.tostring(rootElement, encoding='utf-8')
return etree.tostring(root_element, encoding='utf-8')
def saveNZB(nzb_name, nzb_string):
def _save_nzb(nzb_name, nzb_string):
"""
:param nzb_name: nzb name
@ -160,15 +160,15 @@ def saveNZB(nzb_name, nzb_string):
logger.log(u'Unable to save NZB: ' + ex(e), logger.ERROR)
def stripNS(element, ns):
def _strip_ns(element, ns):
element.tag = element.tag.replace("{" + ns + "}", "")
for curChild in list(element):
stripNS(curChild, ns)
_strip_ns(curChild, ns)
return element
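Namespace stripping is a straight recursive tag rewrite, needed because parsed NZB tags arrive qualified as {namespace}file; a runnable sketch with stdlib ElementTree (namespace URL illustrative):

import xml.etree.ElementTree as etree

def strip_ns(element, ns):
    # drop the '{namespace}' prefix from this tag and every descendant
    element.tag = element.tag.replace('{' + ns + '}', '')
    for child in list(element):
        strip_ns(child, ns)
    return element

doc = etree.XML('<nzb xmlns="http://example/nzb"><file subject="x"/></nzb>')
strip_ns(doc, 'http://example/nzb')
print(doc.tag, doc[0].tag)  # nzb file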
def splitResult(result):
def split_result(result):
"""
:param result: search result
@ -195,7 +195,7 @@ def splitResult(result):
# bust it up
season = parse_result.season_number if None is not parse_result.season_number else 1
separate_nzbs, xmlns = getSeasonNZBs(result.name, resp, season)
separate_nzbs, xmlns = _get_season_nzbs(result.name, resp, season)
result_list = []
@ -246,7 +246,7 @@ def splitResult(result):
nzb_result.provider = result.provider
nzb_result.quality = result.quality
nzb_result.show_obj = result.show_obj
nzb_result.extraInfo = [createNZBString(separate_nzbs[new_nzb], xmlns)]
nzb_result.extraInfo = [_create_nzb_string(separate_nzbs[new_nzb], xmlns)]
result_list.append(nzb_result)
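
For context on the _strip_ns/_create_nzb_string pattern above: a minimal standalone sketch of recursive xmlns stripping with ElementTree. The namespace URL and element names are illustrative assumptions, not values taken from this commit.

    import xml.etree.ElementTree as etree

    def strip_ns(element, ns):
        # ElementTree stores a default xmlns as a "{namespace}" prefix on every
        # tag; removing it lets elements be re-parented under a new root cleanly
        element.tag = element.tag.replace('{' + ns + '}', '')
        for cur_child in list(element):
            strip_ns(cur_child, ns)
        return element

    ns = 'http://example.com/DTD/2003/nzb'  # assumed namespace for illustration
    root = etree.XML('<nzb xmlns="%s"><file subject="demo"/></nzb>' % ns)
    print(strip_ns(root, ns).tag)  # -> nzb (child tags likewise unprefixed)
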

View file

@ -154,7 +154,7 @@ class PeopleQueueActions(object):
class PeopleQueueItem(generic_queue.QueueItem):
def __init__(self, action_id, show_obj, uid=None, force=False, **kwargs):
# type: (integer_types, TVShow, AnyStr, bool, Dict) -> PeopleQueueItem
# type: (integer_types, TVShow, AnyStr, bool, Dict) -> None
"""
:param action_id:
@ -172,7 +172,7 @@ class PeopleQueueItem(generic_queue.QueueItem):
class CastQueueItem(PeopleQueueItem):
def __init__(self, show_obj, show_info_cast=None, uid=None, force=False, scheduled_update=False, switch=False,
**kwargs):
# type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> CastQueueItem
# type: (TVShow, CastList, AnyStr, bool, bool, bool, Dict) -> None
"""
:param show_obj: show obj
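
The type comment edits above follow PEP 484, which specifies that __init__ is annotated as returning None rather than the class being constructed. A minimal sketch of the convention:

    class QueueItem(object):
        def __init__(self, action_id, uid=None):
            # type: (int, str) -> None
            # per PEP 484, __init__ returns None, not QueueItem
            self.action_id = action_id
            self.uid = uid
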

View file

@ -10,8 +10,7 @@ import re
from json_helper import json_loads
from sg_helpers import cmdline_runner, is_virtualenv
from _23 import filter_list, ordered_dict
from six import iteritems, PY2
from six import iteritems
# noinspection PyUnreachableCode
if False:
@ -51,10 +50,6 @@ def run_pip(pip_cmd, suppress_stderr=False):
pip_cmd += ['--progress-bar', 'off']
new_pip_arg = ['--no-python-version-warning']
if PY2:
pip_version, _, _ = _get_pip_version()
if pip_version and 20 > int(pip_version.split('.')[0]):
new_pip_arg = []
return cmdline_runner(
[sys.executable, '-m', 'pip'] + new_pip_arg + ['--disable-pip-version-check'] + pip_cmd,
@ -72,7 +67,7 @@ def initial_requirements():
from Cheetah import VersionTuple
is_cheetah2 = (3, 0, 0) > VersionTuple[0:3]
is_cheetah3py3 = not PY2 and (3, 3, 0) > VersionTuple[0:3]
is_cheetah3py3 = (3, 3, 0) > VersionTuple[0:3]
if not (is_cheetah2 or is_cheetah3py3):
return
@ -158,13 +153,10 @@ def check_pip_env():
_, _, installed, failed_names = _check_pip_env()
py2_last = 'final py2 release'
boost = 'performance boost'
extra_info = dict({'Cheetah3': 'filled requirement', 'CT3': 'filled requirement',
'lxml': boost, 'python-Levenshtein': boost})
extra_info.update((dict(cryptography=py2_last, pip=py2_last, regex=py2_last,
scandir=boost, setuptools=py2_last),
dict(regex=boost))[not PY2])
extra_info.update(dict(regex=boost))
return installed, extra_info, failed_names
@ -256,9 +248,9 @@ def _check_pip_env(pip_outdated=False, reset_fails=False):
names_outdated = dict({cur_item.get('name'): {k: cur_item.get(k) for k in ('version', 'latest_version',
'latest_filetype')}
for cur_item in json_loads(output)})
to_update = set(filter_list(
to_update = set(list(filter(
lambda name: name in specifiers and names_outdated[name]['latest_version'] in specifiers[name],
set(names_reco).intersection(set(names_outdated))))
set(names_reco).intersection(set(names_outdated)))))
# check whether to ignore direct reference specification updates if not dev mode
if not int(os.environ.get('CHK_URL_SPECIFIERS', 0)):
@ -272,7 +264,7 @@ def _check_pip_env(pip_outdated=False, reset_fails=False):
except (BaseException, Exception):
pass
updates_todo = ordered_dict()
updates_todo = dict()
todo = to_install.union(to_update, requirement_update)
for cur_name in [cur_n for cur_n in names_reco if cur_n in todo]:
updates_todo[cur_name] = dict({
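
The ordered_dict() to dict() swap above relies on a py3 guarantee: since Python 3.7 the built-in dict preserves insertion order, so OrderedDict wrappers can be dropped. A quick sketch (package names are placeholders):

    updates_todo = dict()
    for cur_name in ('regex', 'lxml', 'pip'):  # placeholder package names
        updates_todo[cur_name] = {'version': '0.0.0'}
    print(list(updates_todo))  # ['regex', 'lxml', 'pip'] - insertion order kept
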

View file

@ -33,7 +33,7 @@ from .indexers.indexer_config import TVINFO_TVDB
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from _23 import decode_str
from six import iteritems, PY2, string_types
from six import iteritems, string_types
from sg_helpers import long_path, cmdline_runner
# noinspection PyUnreachableCode
@ -762,7 +762,7 @@ class PostProcessor(object):
# if there is a quality available in the status then we don't need to bother guessing from the filename
if ep_obj.status in common.Quality.SNATCHED_ANY:
old_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
old_status, ep_quality = common.Quality.split_composite_status(ep_obj.status)
if common.Quality.UNKNOWN != ep_quality:
self._log(
u'Using "%s" quality from the old status' % common.Quality.qualityStrings[ep_quality],
@ -779,7 +779,7 @@ class PostProcessor(object):
if not cur_name:
continue
ep_quality = common.Quality.nameQuality(cur_name, ep_obj.show_obj.is_anime)
ep_quality = common.Quality.name_quality(cur_name, ep_obj.show_obj.is_anime)
quality_log = u' "%s" quality parsed from the %s %s'\
% (common.Quality.qualityStrings[ep_quality], thing, cur_name)
@ -790,14 +790,14 @@ class PostProcessor(object):
else:
self._log(u'Found' + quality_log, logger.DEBUG)
ep_quality = common.Quality.fileQuality(self.file_path)
ep_quality = common.Quality.file_quality(self.file_path)
if common.Quality.UNKNOWN != ep_quality:
self._log(u'Using "%s" quality parsed from the metadata file content of %s'
% (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG)
return ep_quality
# Try guessing quality from the file name
ep_quality = common.Quality.assumeQuality(self.file_name)
ep_quality = common.Quality.assume_quality(self.file_name)
self._log(u'Using guessed "%s" quality from the file name %s'
% (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG)
@ -824,12 +824,7 @@ class PostProcessor(object):
script_cmd[0] = os.path.abspath(script_cmd[0])
self._log(u'Absolute path to script: ' + script_cmd[0], logger.DEBUG)
if PY2:
script_cmd += [ep_obj.location.encode(sickgear.SYS_ENCODING),
self.file_path.encode(sickgear.SYS_ENCODING)
]
else:
script_cmd += [ep_obj.location, self.file_path]
script_cmd += [ep_obj.location, self.file_path]
script_cmd += ([], [str(ep_obj.show_obj.tvid)])[new_call] + [
str(ep_obj.show_obj.prodid),
@ -894,7 +889,7 @@ class PostProcessor(object):
self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG)
return True
old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status)
# if old episode is not downloaded/archived then it's safe
if common.DOWNLOADED != old_ep_status and common.ARCHIVED != old_ep_status:
@ -1007,10 +1002,10 @@ class PostProcessor(object):
cur_ep_obj.release_name = self.release_name or ''
any_qualities, best_qualities = common.Quality.splitQuality(cur_ep_obj.show_obj.quality)
cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_ep_obj.status)
any_qualities, best_qualities = common.Quality.split_quality(cur_ep_obj.show_obj.quality)
cur_status, cur_quality = common.Quality.split_composite_status(cur_ep_obj.status)
cur_ep_obj.status = common.Quality.compositeStatus(
cur_ep_obj.status = common.Quality.composite_status(
**({'status': common.DOWNLOADED, 'quality': quality},
{'status': common.ARCHIVED, 'quality': quality})
[cur_ep_obj.status in common.Quality.SNATCHED_BEST or
@ -1116,7 +1111,7 @@ class PostProcessor(object):
# set the status of the episodes
# for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
# cur_ep_obj.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# cur_ep_obj.status = common.Quality.composite_status(common.SNATCHED, new_ep_quality)
# if the show directory doesn't exist then make it if allowed
if not os.path.isdir(ep_obj.show_obj.location) and sickgear.CREATE_MISSING_SHOW_DIRS:
@ -1174,9 +1169,8 @@ class PostProcessor(object):
keepalive = keepalive_stop = None
if self.webhandler:
def keep_alive(webh, stop_event):
if not PY2:
import asyncio
asyncio.set_event_loop(asyncio.new_event_loop())
import asyncio
asyncio.set_event_loop(asyncio.new_event_loop())
while not stop_event.is_set():
stop_event.wait(60)
webh('.')
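
On the keep_alive change above: the PY2 guard could go, but the loop setup stays because asyncio only provides a default event loop on the main thread; a worker thread must create and register its own. A minimal sketch of the pattern:

    import asyncio
    import threading

    def worker():
        # without this, get_event_loop() raises RuntimeError off the main thread
        asyncio.set_event_loop(asyncio.new_event_loop())
        asyncio.get_event_loop().run_until_complete(asyncio.sleep(0))

    t = threading.Thread(target=worker)
    t.start()
    t.join()
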

View file

@ -35,8 +35,7 @@ from .history import reset_status
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from .sgdatetime import timestamp_near
from _23 import filter_list, filter_iter, list_values, map_iter
from six import iteritems, iterkeys, string_types, PY2, text_type
from six import iteritems, iterkeys, string_types, text_type
from sg_helpers import long_path, scantree
import lib.rarfile.rarfile as rarfile
@ -281,7 +280,7 @@ class ProcessTVShow(object):
build_path = (lambda old_path: '%s%s' % (helpers.real_path(old_path).rstrip(os.path.sep), os.path.sep))
process_path = build_path(path)
for parent in map_iter(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]):
for parent in map(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]):
if process_path.startswith(parent):
return parent.rstrip(os.path.sep)
@ -352,7 +351,7 @@ class ProcessTVShow(object):
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)):
if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)):
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
return self.result
@ -367,7 +366,7 @@ class ProcessTVShow(object):
work_files += [joined]
rar_files, rarfile_history = self.unused_archives(
path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method)
path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method)
rar_content = self._unrar(path, rar_files, force)
if self.fail_detected:
self._process_failed(dir_name, nzb_name, show_obj=show_obj)
@ -376,8 +375,8 @@ class ProcessTVShow(object):
rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(path, x))]
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
files = [x for x in files if not helpers.is_link(os.path.join(path, x))]
video_files = filter_list(helpers.has_media_ext, files)
video_in_rar = filter_list(helpers.has_media_ext, rar_content)
video_files = list(filter(helpers.has_media_ext, files))
video_in_rar = list(filter(helpers.has_media_ext, rar_content))
work_files += [os.path.join(path, item) for item in rar_content]
if 0 < len(files):
@ -438,7 +437,7 @@ class ProcessTVShow(object):
for walk_path, walk_dir, files in os.walk(os.path.join(path, directory), topdown=False):
if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)):
if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)):
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
return self.result
@ -452,7 +451,7 @@ class ProcessTVShow(object):
files = [x for x in files if not helpers.is_link(os.path.join(walk_path, x))]
rar_files, rarfile_history = self.unused_archives(
walk_path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method,
walk_path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method,
rarfile_history)
rar_content = self._unrar(walk_path, rar_files, force)
work_files += [os.path.join(walk_path, item) for item in rar_content]
@ -461,8 +460,8 @@ class ProcessTVShow(object):
continue
rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(walk_path, x))]
files = list(set(files + rar_content))
video_files = filter_list(helpers.has_media_ext, files)
video_in_rar = filter_list(helpers.has_media_ext, rar_content)
video_files = list(filter(helpers.has_media_ext, files))
video_in_rar = list(filter(helpers.has_media_ext, rar_content))
notwanted_files = [x for x in files if x not in video_files]
# Don't Link media when the media is extracted from a rar in the same path
@ -640,7 +639,7 @@ class ProcessTVShow(object):
all_dirs += process_dir
all_files += fileList
video_files = filter_list(helpers.has_media_ext, all_files)
video_files = list(filter(helpers.has_media_ext, all_files))
all_dirs.append(dir_name)
# check if the directory have at least one tv video file
@ -660,7 +659,7 @@ class ProcessTVShow(object):
if sickgear.UNPACK and process_path and all_files:
# Search for packed release
packed_files = filter_list(helpers.is_first_rar_volume, all_files)
packed_files = list(filter(helpers.is_first_rar_volume, all_files))
for packed in packed_files:
try:
@ -719,9 +718,8 @@ class ProcessTVShow(object):
rar_content = [os.path.normpath(x.filename) for x in rar_handle.infolist() if not x.is_dir()]
renamed = self.cleanup_names(path, rar_content)
cur_unpacked = rar_content if not renamed else \
(list(set(rar_content) - set(iterkeys(renamed))) + list_values(renamed))
self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map_iter(text_type,
cur_unpacked)))
(list(set(rar_content) - set(iterkeys(renamed))) + list(renamed.values()))
self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map(text_type, cur_unpacked)))
unpacked_files += cur_unpacked
except (rarfile.PasswordRequired, rarfile.RarWrongPassword):
self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR)
@ -928,10 +926,6 @@ class ProcessTVShow(object):
if force or not self.any_vid_processed:
return False
# Needed for accessing DB with a unicode dir_name
if PY2 and not isinstance(dir_name, text_type):
dir_name = text_type(dir_name, 'utf_8')
parse_result = None
try:
parse_result = NameParser(convert=True).parse(videofile, cache_result=False)
@ -974,8 +968,6 @@ class ProcessTVShow(object):
else:
# This is needed for video whose name differ from dir_name
if PY2 and not isinstance(videofile, text_type):
videofile = text_type(videofile, 'utf_8')
sql_result = my_db.select(
'SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
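
The filter_list/filter_iter replacements above follow one rule: py3 filter() returns a lazy iterator, so the result is wrapped in list() only where it is reused or indexed, and passed straight to one-shot consumers such as any(). A sketch with made-up file names:

    files = ['show.mkv', 'show.rar', 'notes.txt']  # made-up names

    def has_media_ext(name):  # stand-in for helpers.has_media_ext
        return name.endswith('.mkv')

    video_files = list(filter(has_media_ext, files))  # materialised, reused later
    if any(filter(lambda f: f.endswith('.rar'), files)):  # lazy, consumed once
        print(video_files)  # -> ['show.mkv']
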

View file

@ -32,7 +32,7 @@ from .history import dateFormat
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from .sgdatetime import timestamp_near
from _23 import filter_iter, filter_list, list_values, map_consume, map_list
from _23 import map_consume
from six import string_types
# noinspection PyUnreachableCode
@ -73,7 +73,7 @@ def search_propers(provider_proper_obj=None):
proper_sch = sickgear.proper_finder_scheduler
if None is proper_sch.start_time:
run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
run_in = proper_sch.last_run + proper_sch.cycle_time - datetime.datetime.now()
run_at = ', next check '
if datetime.timedelta() > run_in:
run_at += 'imminent'
@ -131,7 +131,7 @@ def get_old_proper_level(show_obj, tvid, prodid, season, episode_numbers, old_st
[tvid, prodid, season, episode])
if not result or not isinstance(result[0]['resource'], string_types) or not result[0]['resource']:
continue
nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
nq = Quality.scene_quality(result[0]['resource'], show_obj.is_anime)
if nq != new_quality:
continue
try:
@ -214,7 +214,7 @@ def load_webdl_types():
def _search_provider(cur_provider, provider_propers, aired_since_shows, recent_shows, recent_anime):
# type: (GenericProvider, List, datetime.datetime, List[Tuple[int, int]], List[Tuple[int, int]]) -> None
try:
# we need to extent the referenced list from parameter to update the original var
# we need to extend the referenced list from parameter to update the original var
provider_propers.extend(cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
anime=recent_anime))
except AuthException as e:
@ -251,9 +251,9 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime
# filter provider list for:
# 1. from recent search: recent search enabled providers
# 2. native proper search: active search enabled providers
provider_list = filter_list(
provider_list = list(filter(
lambda p: p.is_active() and (p.enable_recentsearch, p.enable_backlog)[None is proper_dict],
sickgear.providers.sortedProviderList())
sickgear.providers.sorted_sources()))
search_threads = []
if None is proper_dict:
@ -362,8 +362,8 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime
# only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
# check if we want this release: same quality as current, current has correct status
# restrict other release group releases to Proper's
old_status, old_quality = Quality.splitCompositeStatus(int(sql_result[0]['status']))
cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
old_status, old_quality = Quality.split_composite_status(int(sql_result[0]['status']))
cur_proper.quality = Quality.name_quality(cur_proper.name, parse_result.is_anime)
cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime, check_is_repack=True)
cur_proper.proper_level = cur_proper.properlevel # local non global value
@ -487,7 +487,7 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime
cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)
return list_values(propers)
return list(propers.values())
def _download_propers(proper_list):
@ -507,24 +507,24 @@ def _download_propers(proper_list):
# get verified list; sort the list of unique Propers for highest proper_level, newest first
for cur_proper in sorted(
filter_iter(lambda p: p not in consumed_proper,
# allows Proper to fail or be rejected and another to be tried (with a different name)
filter_iter(lambda p: _epid(p) not in downloaded_epid, proper_list)),
filter(lambda p: p not in consumed_proper,
# allows Proper to fail or be rejected and another to be tried (with a different name)
filter(lambda p: _epid(p) not in downloaded_epid, proper_list)),
key=operator.attrgetter('properlevel', 'date'), reverse=True): # type: Proper
epid = _epid(cur_proper)
# if the show is in our list and there hasn't been a Proper already added for that particular episode
# then add it to our list of Propers
if epid not in map_list(_epid, verified_propers):
if epid not in list(map(_epid, verified_propers)):
logger.log('Proper may be useful [%s]' % cur_proper.name)
verified_propers.add(cur_proper)
else:
# use Proper with the highest level
remove_propers = set()
map_consume(lambda vp: remove_propers.add(vp),
filter_iter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level),
verified_propers))
filter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level),
verified_propers))
if remove_propers:
verified_propers -= remove_propers
@ -631,7 +631,7 @@ def get_needed_qualites(needed=None):
continue
ep_obj = show_obj.get_episode(season=cur_result['season'], episode=cur_result['episode'])
if ep_obj:
ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
ep_status, ep_quality = Quality.split_composite_status(ep_obj.status)
if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]:
needed.check_needed_qualities([ep_quality])
@ -699,7 +699,7 @@ def _set_last_proper_search(when):
def next_proper_timeleft():
return sickgear.proper_finder_scheduler.timeLeft()
return sickgear.proper_finder_scheduler.time_left()
def get_last_proper_search():
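
On the _download_propers ordering above: candidates are ranked on several attributes at once using operator.attrgetter with reverse=True, highest proper level first, then newest. A self-contained sketch with dummy data:

    import operator
    from collections import namedtuple

    Proper = namedtuple('Proper', 'name properlevel date')  # dummy stand-in
    propers = [Proper('a', 1, 20230102), Proper('b', 2, 20230101),
               Proper('c', 2, 20230102)]
    for cur_p in sorted(propers, key=operator.attrgetter('properlevel', 'date'),
                        reverse=True):
        print(cur_p.name)  # -> c, b, a
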

View file

@ -22,7 +22,6 @@ from .newznab import NewznabConstants
from .. import logger
import sickgear
from _23 import filter_list, filter_iter
from six import iteritems, itervalues
# noinspection PyUnreachableCode
@ -30,6 +29,7 @@ if False:
from typing import AnyStr, List, Union
from .generic import GenericProvider, NZBProvider, TorrentProvider
# noinspection PyUnresolvedReferences
__all__ = [
# usenet
'filesharingtalk',
@ -50,47 +50,47 @@ for module in __all__:
try:
m = importlib.import_module('.' + module, 'sickgear.providers')
globals().update({n: getattr(m, n) for n in m.__all__} if hasattr(m, '__all__')
else dict(filter_iter(lambda t: '_' != t[0][0], iteritems(m.__dict__))))
else dict(filter(lambda t: '_' != t[0][0], iteritems(m.__dict__))))
except ImportError as e:
if 'custom' != module[0:6]:
raise e
def sortedProviderList():
def sorted_sources():
# type: (...) -> List[Union[GenericProvider, NZBProvider, TorrentProvider]]
"""
return sorted provider list
:return: sorted list of providers
"""
initialList = sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList
providerDict = dict(zip([x.get_id() for x in initialList], initialList))
initial_list = sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers
provider_dict = dict(zip([x.get_id() for x in initial_list], initial_list))
newList = []
new_list = []
# add all modules in the priority list, in order
for curModule in sickgear.PROVIDER_ORDER:
if curModule in providerDict:
newList.append(providerDict[curModule])
if curModule in provider_dict:
new_list.append(provider_dict[curModule])
if not sickgear.PROVIDER_ORDER:
nzb = filter_list(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(providerDict))
tor = filter_list(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(providerDict))
newList = sorted(filter_iter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \
sorted(filter_iter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \
sorted(filter_iter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \
sorted(filter_iter(lambda p: p.anime_only, tor), key=lambda v: v.get_id())
nzb = list(filter(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(provider_dict)))
tor = list(filter(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(provider_dict)))
new_list = sorted(filter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \
sorted(filter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \
sorted(filter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \
sorted(filter(lambda p: p.anime_only, tor), key=lambda v: v.get_id())
# add any modules that are missing from that list
for curModule in providerDict:
if providerDict[curModule] not in newList:
newList.append(providerDict[curModule])
for curModule in provider_dict:
if provider_dict[curModule] not in new_list:
new_list.append(provider_dict[curModule])
return newList
return new_list
def makeProviderList():
return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
def provider_modules():
return [x.provider for x in [_get_module_by_name(y) for y in __all__] if x]
def generic_provider_name(n):
@ -103,7 +103,7 @@ def generic_provider_url(u):
return u.strip().strip('/').lower().replace('https', 'http')
def make_unique_list(p_list, d_list=None):
def _make_unique_list(p_list, d_list=None):
# type: (List, List) -> List
"""
remove provider duplicates
@ -119,7 +119,7 @@ def make_unique_list(p_list, d_list=None):
default_names = [d.name for d in d_list or []]
p_list = filter_iter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list)
p_list = filter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list)
for cur_p in p_list:
g_name = generic_provider_name(cur_p.name)
g_url = generic_provider_url(cur_p.url)
@ -136,32 +136,32 @@ def make_unique_list(p_list, d_list=None):
return new_p_list
def getNewznabProviderList(data):
def newznab_source_list(data):
# type: (AnyStr) -> List
defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
providerList = make_unique_list(filter_list(lambda _x: _x, [makeNewznabProvider(x) for x in data.split('!!!')]),
defaultList)
default_list = [_create_newznab_source(x) for x in _default_newznab_sources().split('!!!')]
provider_list = _make_unique_list(list(filter(
lambda _x: _x, [_create_newznab_source(x) for x in data.split('!!!')])), default_list)
providerDict = dict(zip([x.name for x in providerList], providerList))
provider_dict = dict(zip([x.name for x in provider_list], provider_list))
for curDefault in defaultList:
for curDefault in default_list:
if not curDefault:
continue
if curDefault.name not in providerDict:
if curDefault.name not in provider_dict:
curDefault.default = True
providerList.append(curDefault)
provider_list.append(curDefault)
else:
providerDict[curDefault.name].default = True
provider_dict[curDefault.name].default = True
for k in ('name', 'url', 'needs_auth', 'search_mode', 'search_fallback',
'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'server_type'):
setattr(providerDict[curDefault.name], k, getattr(curDefault, k))
setattr(provider_dict[curDefault.name], k, getattr(curDefault, k))
return filter_list(lambda _x: _x, providerList)
return list(filter(lambda _x: _x, provider_list))
def makeNewznabProvider(config_string):
def _create_newznab_source(config_string):
if not config_string:
return None
@ -182,19 +182,19 @@ def makeNewznabProvider(config_string):
newznab_module = sys.modules['sickgear.providers.newznab']
newProvider = newznab_module.NewznabProvider(name, url, **params)
newProvider.enabled = '1' == enabled
new_provider = newznab_module.NewznabProvider(name, url, **params)
new_provider.enabled = '1' == enabled
return newProvider
return new_provider
def getTorrentRssProviderList(data):
providerList = filter_list(lambda _x: _x, [makeTorrentRssProvider(x) for x in data.split('!!!')])
def torrent_rss_source_list(data):
provider_list = list(filter(lambda _x: _x, [_create_torrent_rss_source(x) for x in data.split('!!!')]))
return filter_list(lambda _x: _x, providerList)
return list(filter(lambda _x: _x, provider_list))
def makeTorrentRssProvider(config_string):
def _create_torrent_rss_source(config_string):
if not config_string:
return None
@ -218,25 +218,27 @@ def makeTorrentRssProvider(config_string):
return None
try:
torrentRss = sys.modules['sickgear.providers.rsstorrent']
torrent_rss = sys.modules['sickgear.providers.rsstorrent']
except (BaseException, Exception):
return
newProvider = torrentRss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch,
enable_backlog)
newProvider.enabled = '1' == enabled
new_provider = torrent_rss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch,
enable_backlog)
new_provider.enabled = '1' == enabled
return newProvider
return new_provider
def getDefaultNewznabProviders():
return '!!!'.join(['NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0',
'DrunkenSlug|https://api.drunkenslug.com/||5030,5040|0|eponly|0|0|0',
'NinjaCentral|https://ninjacentral.co.za/||5030,5040|0|eponly|0|0|0',
])
def _default_newznab_sources():
return '!!!'.join([
'|'.join(_src) for _src in
(['NZBgeek', 'https://api.nzbgeek.info/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
['DrunkenSlug', 'https://api.drunkenslug.com/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
['NinjaCentral', 'https://ninjacentral.co.za/', '', '5030,5040', '0', 'eponly', '0', '0', '0'],
)])
def getProviderModule(name):
def _get_module_by_name(name):
prefix, cprov, name = 'sickgear.providers.', 'motsuc'[::-1], name.lower()
if name in __all__ and prefix + name in sys.modules:
return sys.modules[prefix + name]
@ -245,11 +247,11 @@ def getProviderModule(name):
raise Exception('Can\'t find %s%s in providers' % (prefix, name))
def getProviderClass(provider_id):
providerMatch = [x for x in
sickgear.providerList + sickgear.newznabProviderList + sickgear.torrentRssProviderList if
provider_id == x.get_id()]
def get_by_id(provider_id):
provider_match = [x for x in
sickgear.provider_list + sickgear.newznab_providers + sickgear.torrent_rss_providers if
provider_id == x.get_id()]
if 1 != len(providerMatch):
if 1 != len(provider_match):
return None
return providerMatch[0]
return provider_match[0]
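
The get_by_id rename above keeps the same contract: scan all configured sources for a unique id and return the single match or None. A minimal sketch with hypothetical provider objects:

    class Provider(object):
        def __init__(self, pid):
            self.pid = pid

        def get_id(self):
            return self.pid

    sources = [Provider('nzbgeek'), Provider('ninjacentral')]  # hypothetical

    def get_by_id(provider_id):
        provider_match = [x for x in sources if provider_id == x.get_id()]
        return provider_match[0] if 1 == len(provider_match) else None

    print(get_by_id('nzbgeek').pid)  # -> nzbgeek
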

Some files were not shown because too many files have changed in this diff