Merge branch 'feature/AddWatchedState' into develop
CHANGES.md (16 changed lines)
@ -1,5 +1,19 @@
### 0.15.0 (2018-xx-xx xx:xx:xx UTC)

* Add choice to delete watched episodes from a list of played media at Kodi, Emby, and/or Plex,
  instructions at Shows/History/Layout/"Watched"
* Add installable SickGear Kodi repository containing addon "SickGear Watched State Updater"
* Change add Emby setting for watched state scheduler at Config/Notifications/Emby/"Update watched interval"
* Change add Plex setting for watched state scheduler at Config/Notifications/Plex/"Update watched interval"
* Add API cmd=sg.updatewatchedstate, instructions for use are linked to in layout "Watched" at /history
  (an example request is sketched after this changelog excerpt)
* Change history page table filter input values are saved across page refreshes
* Change history page table filter inputs, accept values like "dvd or web" to only display both
* Change history page table filter inputs, press 'ESC' key inside a filter input to reset it
* Add provider activity stats to Shows/History/Layout/ drop down
* Change move provider failures table from Manage/Media Search to Shows/History/Layout/Provider fails
* Change sort provider failures by most recent failure, and with paused providers at the top
* Add SickGear-NZBGet dedicated post processing script, see.. \autoProcessTV\SickGear-NG\INSTALL.txt
* Add non standard multi episode name parsing e.g. S01E02and03 and 1x02and03and04
* Change overhaul and add API functions
* Change API version... start with 10
* Change set application response header to 'SickGear' + add API version

@ -90,8 +104,6 @@
* Add X-Filename response header to getbanner, getposter
* Add X-Fanartname response header for sg.show.getfanart
* Change remove some non-release group stuff from newznab results
* Add SickGear-NZBGet dedicated post processing script, see.. \autoProcessTV\SickGear-NG\INSTALL.txt
* Add non standard multi episode name parsing e.g. S01E02and03 and 1x02and03and04

[develop changelog]
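An example request for the new API command, to give the changelog entry above some context. This is a minimal sketch only: the /api/<apikey>/?cmd= URL shape follows the existing API convention, but the use of a POSTed JSON body and every payload field name shown (path_file, played, label, date_watched) are illustrative assumptions, not confirmed parameters; the instructions linked from the "Watched" layout at /history and the sg.updatewatchedstate wiki page document the real interface.

// Hedged sketch: report one watched event to SickGear (field names are illustrative only).
var apiKey = 'YOUR_SICKGEAR_API_KEY';      // assumption: the command uses the same API key auth as other cmds
var baseUrl = 'http://localhost:8081';     // assumption: default SickGear host and port

var payload = {
    '0': {                                  // one entry per reported media file
        path_file: '/media/tv/Show/Season 01/Show.S01E02.mkv',
        played: 100,                        // percent played at the client
        label: 'Profile 1{kodi}',           // history.tmpl strips a trailing {client} marker to pick the client icon
        date_watched: Math.floor(Date.now() / 1000)
    }
};

fetch(baseUrl + '/api/' + apiKey + '/?cmd=sg.updatewatchedstate', {
    method: 'POST',
    headers: {'Content-Type': 'application/json'},
    body: JSON.stringify(payload)
}).then(function (r) { return r.json(); })
  .then(function (result) { console.log(result); });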
@ -653,22 +653,32 @@ inc_bottom.tmpl
|
|||
display:inline
|
||||
}
|
||||
|
||||
#history-table thead th .icon-glyph,
|
||||
.footer .icon-glyph{
|
||||
opacity:0.4;filter:alpha(opacity=40);
|
||||
float:none;
|
||||
display:inline-block;
|
||||
margin:0 0 -1px 2px;
|
||||
height:12px;
|
||||
margin:0 0 -2px 4px;
|
||||
height:13px;
|
||||
width:14px
|
||||
}
|
||||
#history-table thead th .icon-glyph:hover,
|
||||
#history-table tfoot th .icon-glyph:hover,
|
||||
.footer .icon-glyph:hover{
|
||||
opacity:0.6;filter:alpha(opacity=60)
|
||||
}
|
||||
#history-table thead th .icon-glyph:hover,
|
||||
.footer .icon-glyph:hover{
|
||||
opacity:0.6;filter:alpha(opacity=60);
|
||||
cursor:pointer
|
||||
}
|
||||
#history-table thead th .icon-glyph.age,
|
||||
#history-table thead th .icon-glyph.date:hover,
|
||||
.footer .icon-glyph.timeleft,
|
||||
.footer .icon-glyph.time:hover{
|
||||
background-position:-49px -25px
|
||||
}
|
||||
#history-table thead th .icon-glyph.date,
|
||||
#history-table thead th .icon-glyph.age:hover,
|
||||
.footer .icon-glyph.time,
|
||||
.footer .icon-glyph.timeleft:hover{
|
||||
background-position:-193px -121px
|
||||
|
@ -2605,40 +2615,51 @@ h2.day.add-apace, h2.network.add-space{
|
|||
/* =======================================================================
|
||||
history.tmpl
|
||||
========================================================================== */
|
||||
.strike-deleted{
|
||||
text-decoration:line-through
|
||||
}
|
||||
|
||||
#historyTable td,
|
||||
#historyTable td.provider span{
|
||||
.red-bg{
|
||||
background-color:#992828
|
||||
}
|
||||
|
||||
.green-bg{
|
||||
background-color:#3f993f
|
||||
}
|
||||
|
||||
#history-table td,
|
||||
#history-table td.provider span{
|
||||
text-align:center
|
||||
}
|
||||
|
||||
#historyTable td.provider span{
|
||||
#history-table td.provider span{
|
||||
padding:0 2px
|
||||
}
|
||||
|
||||
#historyTable td.provider > img{
|
||||
#history-table td.provider > img{
|
||||
margin-right:3px
|
||||
}
|
||||
|
||||
#historyTable td.provider span.fail img{
|
||||
#history-table td.provider span.fail img{
|
||||
opacity:0.5;
|
||||
filter:alpha(opacity=50)
|
||||
}
|
||||
|
||||
#historyTable td img,
|
||||
#historyTable td span{
|
||||
#history-table td img,
|
||||
#history-table td span{
|
||||
vertical-align:middle
|
||||
}
|
||||
|
||||
#historyTable td span.article{
|
||||
#history-table td span.article{
|
||||
vertical-align:initial
|
||||
}
|
||||
|
||||
#historyTable td img.help,
|
||||
#historyTable td span.help{
|
||||
#history-table td img.help,
|
||||
#history-table td span.help{
|
||||
cursor:help
|
||||
}
|
||||
|
||||
#historyTable td.tvShow{
|
||||
#history-table td.tvShow{
|
||||
text-align:left
|
||||
}
|
||||
|
||||
|
@ -3256,15 +3277,18 @@ input.get_less_eps{
|
|||
#media-search .section{
|
||||
padding-bottom:10px
|
||||
}
|
||||
#media-search .btn{
|
||||
#media-search .btn,
|
||||
#provider-failures .btn{
|
||||
margin:0 6px 0 0;
|
||||
min-width:70px
|
||||
}
|
||||
#media-search .btn.shows-more,
|
||||
#media-search .btn.shows-less{
|
||||
#media-search .btn.shows-less,
|
||||
#provider-failures .btn.shows-more,
|
||||
#provider-failures .btn.shows-less{
|
||||
margin:6px 6px 6px 0;
|
||||
}
|
||||
#media-search .btn.provider-retry{
|
||||
#provider-failures .btn.provider-retry{
|
||||
margin:6px 0 6px 4px;
|
||||
}
|
||||
.tablesorter.provider-failures{width:auto;clear:both;margin-bottom:10px}
|
||||
|
@ -3356,6 +3380,16 @@ input.get_less_eps{
|
|||
Global
|
||||
========================================================================== */
|
||||
|
||||
.contrast-text,
|
||||
a.contrast-text:focus,
|
||||
a.contrast-text:active,
|
||||
a.contrast-text:visited{
|
||||
color:#ddd
|
||||
}
|
||||
a.contrast-text:hover{
|
||||
color:#999
|
||||
}
|
||||
|
||||
span.path{
|
||||
padding:3px;
|
||||
margin-left:3px
@ -4,6 +4,8 @@
|
|||
#from lib.libtrakt import TraktAPI
|
||||
#from sickbeard.helpers import anon_url, starify
|
||||
#from sickbeard.notifiers import NotifierFactory
|
||||
<% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp#
|
||||
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
|
||||
##
|
||||
#set global $title = 'Config - Notifications'
|
||||
#set global $header = 'Notifications'
|
||||
|
@ -91,6 +93,21 @@
|
|||
</span>
|
||||
</label>
|
||||
</div>
|
||||
#set $selected = ' selected="selected" class="selected"'
|
||||
<div class="field-pair">
|
||||
<label for="emby-watched-interval">
|
||||
<span class="component-title">Update watched interval</span>
|
||||
<span class="component-desc">
|
||||
<select id="emby-watched-interval" name="emby_watched_interval" class="form-control input-sm">
|
||||
<option value="0"#if not $sg_var('EMBY_WATCHEDSTATE_SCHEDULED')#${selected}#end if#>Off </option>
|
||||
#for v in [10, 15, 30, 45, 60]
|
||||
<option value="$v"#if $sg_var('EMBY_WATCHEDSTATE_SCHEDULED') and $v == $sg_var('EMBY_WATCHEDSTATE_FREQUENCY')#${selected}#end if#>$v #if not $sg_var('EMBY_WATCHEDSTATE_SCHEDULED') and $v == $sg_var('EMBY_WATCHEDSTATE_FREQUENCY')#(recent) #end if#</option>
|
||||
#end for
|
||||
</select>
|
||||
<span>minutes to fetch episode watched states for the history page</span>
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label for="emby-host">
|
||||
<span class="component-title">Host(s) running Emby</span>
|
||||
|
@ -285,12 +302,26 @@
|
|||
<label for="plex-update-library">
|
||||
<span class="component-title">Update server library</span>
|
||||
<span class="component-desc">
|
||||
<input type="checkbox" class="enabler" name="plex_update_library" id="plex-update-library" #if $sickbeard.PLEX_UPDATE_LIBRARY then 'checked="checked" ' else ''#/>
|
||||
<input type="checkbox" name="plex_update_library" id="plex-update-library" #if $sickbeard.PLEX_UPDATE_LIBRARY then 'checked="checked" ' else ''#/>
|
||||
<p>update Plex Media Server library when a download finishes</p>
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
<div id="content_plex-update-library">
|
||||
#set $selected = ' selected="selected" class="selected"'
|
||||
<div class="field-pair">
|
||||
<label for="plex-watched-interval">
|
||||
<span class="component-title">Update watched interval</span>
|
||||
<span class="component-desc">
|
||||
<select id="plex-watched-interval" name="plex_watched_interval" class="form-control input-sm">
|
||||
<option value="0"#if not $sg_var('PLEX_WATCHEDSTATE_SCHEDULED')#${selected}#end if#>Off </option>
|
||||
#for v in [10, 15, 30, 45, 60]
|
||||
<option value="$v"#if $sg_var('PLEX_WATCHEDSTATE_SCHEDULED') and $v == $sg_var('PLEX_WATCHEDSTATE_FREQUENCY')#${selected}#end if#>$v #if not $sg_var('PLEX_WATCHEDSTATE_SCHEDULED') and $v == $sg_var('PLEX_WATCHEDSTATE_FREQUENCY')#(recent) #end if#</option>
|
||||
#end for
|
||||
</select>
|
||||
<span>minutes to fetch episode watched states for the history page</span>
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label for="plex-server-host">
|
||||
<span class="component-title">Plex Media Server IP:Port</span>
|
||||
|
@ -310,7 +341,6 @@
|
|||
<div class="clear-left"> </div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div class="field-pair">
|
||||
@ -114,7 +114,7 @@
|
|||
<span class="component-title">Extra scripts</span>
|
||||
<span class="component-desc">
|
||||
<input type="text" name="extra_scripts" id="extra_scripts" value="<%='|'.join(sickbeard.EXTRA_SCRIPTS)%>" class="form-control input-sm input350">
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_extra_params" title="Toggle info for script arguments" class="legend" class="legend" />
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_extra_params" title="Toggle info for script arguments">
|
||||
<div class="clear-left">
|
||||
<p class="note">scripts are called after built-in post processing.
|
||||
<b>note:</b> use <b class="grey-text boldest">|</b> to separate additional extra scripts
|
||||
|
@ -332,7 +332,7 @@
|
|||
<span class="component-title"></span>
|
||||
<span class="component-desc">
|
||||
<input type="text" name="naming_pattern" id="naming_pattern" value="$sickbeard.NAMING_PATTERN" class="form-control input-sm input350 custom-pattern">
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_key" title="Toggle Naming Legend" class="legend" class="legend" />
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_key" title="Toggle Naming Legend">
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
|
@ -552,7 +552,7 @@
|
|||
<span class="component-title"></span>
|
||||
<span class="component-desc">
|
||||
<input type="text" name="naming_abd_pattern" id="naming_abd_pattern" value="$sickbeard.NAMING_ABD_PATTERN" class="form-control input-sm input350 custom-pattern">
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_abd_key" title="Toggle ABD Naming Legend" class="legend" />
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_abd_key" title="Toggle ABD Naming Legend">
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
|
@ -750,7 +750,7 @@
|
|||
<span class="component-title"></span>
|
||||
<span class="component-desc">
|
||||
<input type="text" name="naming_sports_pattern" id="naming_sports_pattern" value="$sickbeard.NAMING_SPORTS_PATTERN" class="form-control input-sm input350 custom-pattern">
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_sports_key" title="Toggle Sports Naming Legend" class="legend" />
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_sports_key" title="Toggle Sports Naming Legend">
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
|
@ -953,7 +953,7 @@
|
|||
<span class="component-title"></span>
|
||||
<span class="component-desc">
|
||||
<input type="text" name="naming_anime_pattern" id="naming_anime_pattern" value="$sickbeard.NAMING_ANIME_PATTERN" class="form-control input-sm input350 custom-pattern">
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_anime_key" title="Toggle Anime Naming Legend" class="legend" />
|
||||
<img src="$sbRoot/images/legend16.png" width="16" height="16" alt="[Toggle Key]" id="show_naming_anime_key" title="Toggle Anime Naming Legend">
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
@ -1,64 +1,41 @@
|
|||
#import datetime
|
||||
#import re
|
||||
|
||||
##
|
||||
#import sickbeard
|
||||
#from sickbeard import history, providers, sbdatetime
|
||||
#from sickbeard import history, providers, sbdatetime, WEB_PORT
|
||||
#from sickbeard.common import Quality, statusStrings, SNATCHED_ANY, SNATCHED_PROPER, DOWNLOADED, SUBTITLED, ARCHIVED, FAILED
|
||||
#from sickbeard.helpers import human
|
||||
#from sickbeard.providers import generic
|
||||
<% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp#
|
||||
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
|
||||
##
|
||||
#set global $title = 'History'
|
||||
#set global $header = 'History'
|
||||
#set $layout = $sg_str('HISTORY_LAYOUT', 'detailed')
|
||||
#set $layout_name = 'watched' in $layout and 'Watched' or 'stats' in $layout and 'Activity Hits' or 'provider_failures' in $layout and 'Provider Failures' or 'Activity'
|
||||
#set sg_port = str($getVar('sbHttpPort', WEB_PORT))
|
||||
#set global $title = 'History : %s' % $layout_name
|
||||
#set global $header = 'History <span class="grey-text">: %s</span>' % $layout_name
|
||||
#set global $sbPath = '..'
|
||||
#set global $topmenu = 'home'
|
||||
#set $layout = $sg_str('HISTORY_LAYOUT', 'detailed')
|
||||
##
|
||||
#import os.path
|
||||
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_top.tmpl')
|
||||
##
|
||||
#set $checked = ' checked="checked"'
|
||||
|
||||
<script type="text/javascript">
|
||||
<script src="$sbRoot/js/history.js?v=$sbPID"></script>
|
||||
|
||||
<script>
|
||||
<!--
|
||||
	\$.tablesorter.addParser({
		id: 'cDate',
		is: function(s) {
			return false;	// never auto-detect this parser; columns opt in via the headers config below
		},
		format: function(s) {
			return s;	// pass the extracted cell value through unchanged, it is already a sortable value
		},
		type: 'numeric'
	});
|
||||
\$.SickGear.history = {
|
||||
layoutName: '$layout',
|
||||
isCompact: #echo ('!1', '!0')['compact' in $layout]#,
|
||||
isTrashit: #echo ('!1', '!0')[bool($sg_var('TRASH_REMOVE_SHOW'))]#,
|
||||
useSubtitles: #echo ('!1', '!0')[bool($sg_var('USE_SUBTITLES'))]#,
|
||||
lastDeleteFiles: '#echo ('', $checked)[$getVar('last_delete_files', False)]#',
|
||||
lastDeleteRecords: '#echo ('', $checked)[$getVar('last_delete_records', False)]#',
|
||||
};
|
||||
|
||||
\$(document).ready(function()
|
||||
{
|
||||
\$('#historyTable:has(tbody tr)').tablesorter({
|
||||
widgets: ['zebra', 'filter'],
|
||||
sortList: [[0, 1]],
|
||||
textExtraction: {
|
||||
0: function(node) { return \$(node).find('span').text().toLowerCase(); },
|
||||
#if ('detailed' == $layout)
|
||||
4: function(node) { return \$(node).find('span').text().toLowerCase(); }
|
||||
#else
|
||||
1: function(node) { return \$(node).find('span[data-name]').attr('data-name').toLowerCase(); },
|
||||
2: function(node) { return \$(node).attr('provider').toLowerCase(); },
|
||||
5: function(node) { return \$(node).attr('quality').toLowerCase(); }
|
||||
#end if
|
||||
},
|
||||
headers: {
|
||||
0: { sorter: 'cDate' },
|
||||
#if ('detailed' == $layout)
|
||||
4: { sorter: 'quality' }
|
||||
#else
|
||||
4: { sorter: false },
|
||||
5: { sorter: 'quality' }
|
||||
#end if
|
||||
}
|
||||
|
||||
});
|
||||
\$('#limit').change(function(){
|
||||
window.location.href = '$sbRoot/history/?limit=' + \$(this).val()
|
||||
});
|
||||
\$(document).ready(function() {
|
||||
|
||||
#set $fuzzydate = 'airdate'
|
||||
#if $sg_var('FUZZY_DATING')
|
||||
|
@ -80,31 +57,64 @@
|
|||
#else
|
||||
<h1 class="title">$title</h1>
|
||||
#end if
|
||||
#if $varExists('earliest')
|
||||
<div class="grey-text" style="clear:both;margin:-8px 0 1.5em 2px;font-size:0.85em;float:left">
|
||||
Stats range from <span class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($datetime.datetime.strptime(str($earliest), $history.dateFormat))</span> until <span class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($datetime.datetime.strptime(str($latest), $history.dateFormat))</span>
|
||||
</div>
|
||||
#end if
|
||||
|
||||
#set $html_selected = ' selected="selected"'
|
||||
#set $selected = ' selected="selected" class="selected"'
|
||||
##
|
||||
<div class="h2footer pull-right">Limit:
|
||||
<div id="results-sortby" class="h2footer pull-right">Layout:
|
||||
<select name="limit" id="limit" class="form-control form-control-inline input-sm">
|
||||
<option value="100"#echo ('', $html_selected)['100' == $limit]#>100</option>
|
||||
<option value="250"#echo ('', $html_selected)['250' == $limit]#>250</option>
|
||||
<option value="500"#echo ('', $html_selected)['500' == $limit]#>500</option>
|
||||
<option value="0"#echo ('', $html_selected)['0' == $limit]#>All</option>
|
||||
<option value="100"#echo ('', $selected)['100' == $limit]#>100</option>
|
||||
<option value="250"#echo ('', $selected)['250' == $limit]#>250</option>
|
||||
<option value="500"#echo ('', $selected)['500' == $limit]#>500</option>
|
||||
<option value="0"#echo ('', $selected)['0' == $limit]#>All</option>
|
||||
</select>
|
||||
|
||||
<span style="margin-left:5px">Layout:
|
||||
<span style="margin-left:5px">
|
||||
<select name="HistoryLayout" class="form-control form-control-inline input-sm" onchange="location = this.options[this.selectedIndex].value">
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=compact"#echo ('', $html_selected)['compact' == $sg_str('HISTORY_LAYOUT')]#>Compact</option>
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=detailed"#echo ('', $html_selected)['detailed' == $sg_str('HISTORY_LAYOUT', 'detailed')]#>Detailed</option>
|
||||
<optgroup label="Activity">
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=compact"#echo ('', $selected)['compact' == $sg_str('HISTORY_LAYOUT')]#>Compact</option>
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=detailed"#echo ('', $selected)['detailed' == $sg_str('HISTORY_LAYOUT', 'detailed')]#>Detailed</option>
|
||||
</optgroup>
|
||||
<optgroup label="Watched">
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=compact_watched"#echo ('', $selected)['compact_watched' == $sg_str('HISTORY_LAYOUT')]#>Compact</option>
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=detailed_watched"#echo ('', $selected)['detailed_watched' == $sg_str('HISTORY_LAYOUT')]#>Detailed</option>
|
||||
</optgroup>
|
||||
<optgroup label="Stats">
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=compact_stats"#echo ('', $selected)['compact_stats' == $sg_str('HISTORY_LAYOUT')]#>Activity hits</option>
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=graph_stats"#echo ('', $selected)['graph_stats' == $sg_str('HISTORY_LAYOUT')]#>Graphed hits</option>
|
||||
<option value="$sbRoot/setHistoryLayout/?layout=provider_failures"#echo ('', $selected)['provider_failures' == $sg_str('HISTORY_LAYOUT')]#>Provider fails</option>
|
||||
</optgroup>
|
||||
</select>
|
||||
</span>
|
||||
</div>
|
||||
<br>
|
||||
|
||||
<table id="historyTable" class="sickbeardTable tablesorter $layout" cellspacing="1" border="0" cellpadding="0">
|
||||
<style>
|
||||
#watched-help thead tr th, #watched-help tbody tr td{text-align:left}
|
||||
#watched-help tbody td ol, #watched-help tbody td ul, #watched-help tbody td p{margin-bottom:0}
|
||||
#watched-help tbody td img{margin-right:3px}
|
||||
#watched-help .vmid{vertical-align:middle}
|
||||
#history-table .age{display:none}
|
||||
#history-table.event-age .age{display:inline-block}
|
||||
#history-table.event-age .date{display:none}
|
||||
</style>
|
||||
##
|
||||
#if 'failure' not in $layout
|
||||
##
|
||||
<table id="history-table" data-table-group="$layout" class="sickbeardTable tablesorter $layout" cellspacing="1" border="0" cellpadding="0">
|
||||
##
|
||||
#end if
|
||||
##
|
||||
##
|
||||
#if 'detailed' == $layout
|
||||
##
|
||||
##
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="nowrap">Time</th>
|
||||
<th class="text-nowrap">Time</th>
|
||||
<th width="35%">Episode</th>
|
||||
<th>Action</th>
|
||||
<th>Provider</th>
|
||||
|
@ -114,21 +124,20 @@
|
|||
|
||||
<tfoot>
|
||||
<tr>
|
||||
<th class="nowrap" colspan="5"> </th>
|
||||
<th class="text-nowrap" colspan="5"> </th>
|
||||
</tr>
|
||||
</tfoot>
|
||||
|
||||
<tbody>
|
||||
#for $hItem in $historyResults
|
||||
#for $hItem in $history_results
|
||||
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($hItem['action']))
|
||||
#set $data_name = (re.sub('^((?:A(?!\s+to)n?)|The)\s(\w)', r'\2', $hItem['show_name']), $hItem['show_name'])[$sg_var('SORT_ARTICLE')]
|
||||
#set $display_name = '<span data-name="%s">%s - S%02iE%02i</span>' % (
|
||||
$data_name,
|
||||
(re.sub('^((?:A(?!\s+to)n?)|The)\s(\w)', r'<span class="article">\1</span> \2', $hItem['show_name']), $hItem['show_name'])[$sg_var('SORT_ARTICLE')],
|
||||
int(hItem['season']), int(hItem['episode']))
|
||||
#set $display_name = '<span data-sort="%s">%s - S%02iE%02i</span>' % (
|
||||
$hItem['data_name'],
|
||||
(('<span class="article">%s</span> %s' % ($hItem['name1'], $hItem['name2'])), $hItem['show_name'])[$sg_var('SORT_ARTICLE') or not $hItem['name1']],
|
||||
int($hItem['season']), int($hItem['episode']))
|
||||
<tr>
|
||||
#set $curdatetime = $datetime.datetime.strptime(str($hItem['date']), $history.dateFormat)
|
||||
<td><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div><span class="sort-data">$time.mktime($curdatetime.timetuple())</span></td>
|
||||
<td><div class="${fuzzydate}" data-sort="$time.mktime($curdatetime.timetuple())">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div></td>
|
||||
<td class="tvShow"><a href="$sbRoot/home/displayShow?show=$hItem['showid']#season-$hItem['season']">$display_name#if $Quality.splitCompositeStatus($hItem['action'])[0] == $SNATCHED_PROPER then ' <span class="quality Proper">Proper</span>' else ''#</a></td>
|
||||
<td#echo ('', ' class="subtitles_column"')[$SUBTITLED == $curStatus]#>
|
||||
#if $SUBTITLED == $curStatus
|
||||
|
@ -159,11 +168,14 @@
|
|||
<td><span class="hide">$curQuality</span><span class="quality $Quality.get_quality_css($curQuality)">$Quality.get_quality_ui($curQuality)</span></td>
|
||||
</tr>
|
||||
#end for
|
||||
|
||||
#else
|
||||
##
|
||||
##
|
||||
#elif ('compact' == $layout)
|
||||
##
|
||||
##
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="nowrap">Time</th>
|
||||
<th class="text-nowrap">Time</th>
|
||||
<th width="#echo '3%s%%' % ('5', '0')[$sg_var('USE_SUBTITLES')]#">Episode</th>
|
||||
<th>Snatched</th>
|
||||
<th>Downloaded</th>
|
||||
|
@ -176,18 +188,17 @@
|
|||
|
||||
<tfoot>
|
||||
<tr>
|
||||
<th class="nowrap" colspan="6"> </th>
|
||||
<th class="text-nowrap" colspan="6"> </th>
|
||||
</tr>
|
||||
</tfoot>
|
||||
|
||||
<tbody>
|
||||
#for $hItem in $compactResults
|
||||
#for $hItem in $compact_results
|
||||
#set $curdatetime = $datetime.datetime.strptime(str($hItem['actions'][0]['time']), $history.dateFormat)
|
||||
#set $data_name = (re.sub('^((?:A(?!\s+to)n?)|The)\s(\w)', r'\2', $hItem['show_name']), $hItem['show_name'])[$sg_var('SORT_ARTICLE')]
|
||||
#set $display_name = '<span data-name="%s">%s - S%02iE%02i</span>' % (
|
||||
$data_name,
|
||||
(re.sub('^((?:A(?!\s+to)n?)|The)\s(\w)', r'<span class="article">\1</span> \2', $hItem['show_name']), $hItem['show_name'])[$sg_var('SORT_ARTICLE')],
|
||||
int(hItem['season']), int(hItem['episode']))
|
||||
#set $display_name = '<span data-sort="%s">%s - S%02iE%02i</span>' % (
|
||||
$hItem['data_name'],
|
||||
(('<span class="article">%s</span> %s' % ($hItem['name1'], $hItem['name2'])), $hItem['show_name'])[$sg_var('SORT_ARTICLE') or not $hItem['name1']],
|
||||
int($hItem['season']), int($hItem['episode']))
|
||||
#set $prov_list = []
|
||||
#set $down_list = []
|
||||
#set $order = 1
|
||||
|
@ -235,7 +246,7 @@
|
|||
#end if
|
||||
#end for
|
||||
<tr>
|
||||
<td><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div><span class="sort-data">$time.mktime($curdatetime.timetuple())</span></td>
|
||||
<td><div class="${fuzzydate}" data-sort="$time.mktime($curdatetime.timetuple())">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div></td>
|
||||
<td class="tvShow">
|
||||
<span><a href="$sbRoot/home/displayShow?show=$hItem['show_id']#season-$hItem['season']">$display_name#if 'proper' in $hItem['resource'].lower or 'repack' in $hItem['resource'].lower then ' <span class="quality Proper">Proper</span>' else ''#</a></span>
|
||||
</td>
|
||||
|
@ -261,9 +272,403 @@
|
|||
<td quality="$curQuality"><span class="quality $Quality.get_quality_css($curQuality)">$Quality.get_quality_ui($curQuality)</span></td>
|
||||
</tr>
|
||||
#end for
|
||||
##
|
||||
##
|
||||
#elif 'watched' in $layout
|
||||
##
|
||||
##
|
||||
<thead>
|
||||
<tr>
|
||||
<th><span class="date">Event Date</span><span class="age">Event Age</span></th>
|
||||
<th width="8%">Played</th>
|
||||
<th class="text-nowrap">Episode</th>
|
||||
<th class="text-nowrap" width="15%">Label (Profile)</th>
|
||||
<th class="text-nowrap" width="10%">Quality</th>
|
||||
<th class="text-nowrap" width="10%">Size</th>
|
||||
<th width="10%">Delete</th>
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
#end if
|
||||
<tfoot>
|
||||
<tr>
|
||||
<th>
|
||||
<i style="background-image:url($sbRoot/images/legend16.png)" id="show-watched-help" title="Toggle help" class="add-qtip icon-glyph"></i>
|
||||
<span id="row-count" style="font-size:12px;line-height:20px;float:left"></span>
|
||||
</th>
|
||||
<th colspan="4"></th>
|
||||
<th><span id="sum-size" style="border-top:solid 1px #ddd">0 Bytes</span></th>
|
||||
<th>
|
||||
#if $len($results)
|
||||
<input id="del-watched" type="button" class="btn" value="Submit">
|
||||
#end if
|
||||
</th>
|
||||
</tr>
|
||||
</tfoot>
|
||||
|
||||
<tbody id="tbody">
|
||||
#if not $results
|
||||
<tr colspan="7">
|
||||
<td colspan="7" style="text-align:center">
|
||||
<p>Media marked watched or unwatched will be listed in this space</p>
|
||||
</td>
|
||||
</tr>
|
||||
#else
|
||||
#for $hItem in $results
|
||||
#if $hItem.hide
|
||||
#continue
|
||||
#end if
|
||||
#set $compact = 'compact' in $layout and $hItem['rowid'] not in $mru_row_ids
|
||||
##
|
||||
#set $curdatetime = $datetime.datetime.fromtimestamp($hItem.get('date_watched'))
|
||||
#set $curage = ($datetime.datetime.now() - $curdatetime).days
|
||||
#set $display_name = '<span data-sort="%s"%s>%s - S%sE%s</span>' % (
|
||||
$hItem.get('data_name'),
|
||||
('', ' class="grey-text"')[$hItem.get('deleted')],
|
||||
(('<span class="article">%s</span> %s' % ($hItem.get('name1'), $hItem.get('name2'))), $hItem.get('show_name'))[$sg_var('SORT_ARTICLE') or not $hItem.get('name1')],
|
||||
$hItem.get('season'), $hItem.get('episode'))
|
||||
<tr data-tvep-id="$hItem['tvep_id']" data-file="$hItem['location']"#if $compact# class="hide"#end if#>
|
||||
<td>
|
||||
<div class="date ${fuzzydate}" data-sort="$curage">$sbdatetime.sbdatetime.sbfdatetime($curdatetime)</div>
|
||||
<div class="age">${curage}d</div>
|
||||
</td>
|
||||
<td>
|
||||
#set $float_played = int($hItem.get('played'))/100.0
|
||||
#set $value = ($float_played, int($float_played))[int($float_played) == $float_played]
|
||||
<span#if not $bool($int($value))# class="add-qtip" title="Marked Unwatched"#end if#>$value</span>
|
||||
</td>
|
||||
<td class="tvShow text-nowrap">
|
||||
<span class="add-qtip#if not $hItem.get('deleted')#"#else# strike-deleted" title="file no longer exists"#end if#>
|
||||
<a href="$sbRoot/home/displayShow?show=$hItem.get('showid')#season-$hItem.get('season')">$display_name</a>
|
||||
</span>
|
||||
</td>
|
||||
<td>
|
||||
#set $label = re.sub('\{[^}]+\}$', '', $hItem.get('label'))
|
||||
#set $client = ''
|
||||
#try
|
||||
#set $client = re.findall('\{([^}]+)\}$', $hItem.get('label'))[0].lower()
|
||||
#set $client_label = ('%s %s' % ($client, $label)).strip(' ')
|
||||
<img height="16px" style="margin-right:3px" src="$sbRoot/images/notifiers/${client}.png"><span data-sort="$client_label" style="vertical-align:middle">$label</span>
|
||||
#except
|
||||
<span data-sort="${label}">$label</span>
|
||||
#pass
|
||||
#end try
|
||||
</td>
|
||||
<td quality="$hItem.get('quality')" class="text-nowrap">
|
||||
<span class="quality $Quality.get_quality_css($hItem.get('quality')) add-qtip" title="#if $hItem.get('deleted')#file no longer exists#else#$hItem['location']#end if#">$Quality.qualityStrings[$hItem.get('quality')].replace('SD DVD', 'SD DVD/BR/BD')</span>
|
||||
</td>
|
||||
<td class="size text-nowrap">
|
||||
<span#if $hItem.get('deleted')# class="add-qtip grey-text strike-deleted" title="file no longer exists"#end if# data-sort="$hItem.get('file_size')">$human($hItem.get('file_size'))</span>
|
||||
</td>
|
||||
<td class="#echo ('green', 'red')[100 > $hItem.get('mru_count')]#-bg">
|
||||
<input id="del-$hItem.get('rowid')-$hItem.get('indexer')-$hItem.get('showid')"
|
||||
title="last event<br>#echo ('watched', 'unwatched')[100 > $hItem.get('mru_count')]#"
|
||||
type="checkbox" class="del-check add-qtip">
|
||||
#if $hItem.get('mru_count')
|
||||
<span style="position:absolute">
|
||||
<i class="icon-glyph" style="top:-6px;right:4px;position:relative; opacity: 0.4;background-position: -95px -119px"></i>
|
||||
</span>
|
||||
#end if
|
||||
</td>
|
||||
</tr>
|
||||
#end for
|
||||
#end if
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
#def row_class()
|
||||
#set global $row += 1
|
||||
#echo ('even', 'odd')[bool($row % 2)]
|
||||
#end def
|
||||
<table id="watched-help" style="#if $hide_watched_help#display:none;#end if#margin-top:15px" class="sickbeardTable tablesorter" cellspacing="1" border="0" cellpadding="0">
|
||||
<thead>
|
||||
<tr>
|
||||
<th colspan="2">General help</th>
|
||||
</tr>
|
||||
</thead>
|
||||
#set global $row = 0
|
||||
<tbody>
|
||||
<tr class="$row_class()">
|
||||
<td colspan="2">
|
||||
<p>Filters are saved per layout. Examples;</p>
|
||||
<ul>
|
||||
<li>Event Date/Age, or Played: <span class="grey-text">>7 and <60</span> (between 7d and 60d) , <span class="grey-text">>1</span> (played more than once)</li>
|
||||
<li>Label (Profile): <span class="grey-text">emby or kodi</span> , <span class="grey-text">!kodi and !plex</span> , <span class="grey-text">emby user2</span> , <span class="grey-text">emby user"</span> (single end quote excludes user2)</li>
|
||||
<li>Quality: <span class="grey-text">sd or dl</span> , <span class="grey-text">blu</span></li>
|
||||
</ul>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td colspan="2">
|
||||
<p>The above table is sorted first by played and then by date of event (i.e. watched/unwatched)</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td colspan="2">
|
||||
<p>To multi-select checkboxes or column headers, click one, then hold shift and click another</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td colspan="2">
|
||||
<p style="margin-bottom:6px">Key for <span class="grey-text">Delete</span> column;</p>
|
||||
<ul>
|
||||
<li style="line-height:25px"><span class="contrast-text green-bg" style="padding:4px 18px 0 6px;margin:0 6px 0 0">Green
|
||||
<span style="position:absolute">
|
||||
<i class="icon-glyph" style="position:relative;top:-2px;right:8px;opacity: 0.4;background-position:-95px -119px"></i>
|
||||
</span>
|
||||
</span>Watched at least once at client</li>
|
||||
<li style="line-height:25px"><span class="contrast-text red-bg" style="padding:4px 20px 0 6px;margin:0 6px 0 0">Red
|
||||
</span>Partially watched or set 'unwatched' at client
|
||||
</li>
|
||||
</ul>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td colspan="2">
|
||||
<p>To show how much free space a delete will yield, the size tally increases for each selected episode that still has a media file</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td colspan="2">
|
||||
<p>In <span class="grey-text">Compact</span> layout, deleting records removes all episode related records. <span class="grey-text">Detailed</span> layout allows for individual selection [<a rel="dialog" href="https://raw.githubusercontent.com/wiki/SickGear/SickGear/images/screenies/watched.png">Show me</a>]</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td colspan="2">
|
||||
<p>Any script can add to the watched list by making a <a href="https://github.com/SickGear/SickGear/wiki/API#sg.updatewatchedstate">documented API call</a> to <code>sg.updatewatchedstate</code></p>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
<thead>
|
||||
<tr>
|
||||
<th width="20%">Supported clients</th>
|
||||
<th>To use</th>
|
||||
</tr>
|
||||
</thead>
|
||||
#set global $row = 0
|
||||
<tbody>
|
||||
<tr class="$row_class()">
|
||||
<td><img height="16px" src="$sbRoot/images/notifiers/kodi.png"><span class="vmid">Kodi</span>
|
||||
<p><em class="grey-text">Isengard, Jarvis, Krypton</em><br>
|
||||
Episodes marked watched or unwatched are pushed in real-time and shown above.</p>
|
||||
</td>
|
||||
<td>
|
||||
<p>Make the following changes at Kodi;</p>
|
||||
<ol>
|
||||
<li>Install the SickGear repo to access its Kodi Add-on
|
||||
<ul>
|
||||
<li>in <b class="boldest">Filemanager</b>, add a source for SickGear with <span class="grey-text">&lt;ip&gt;:&lt;port&gt;/kodi/</span> (e.g. <span class="grey-text">192.168.0.10:$sg_port/kodi/</span>)<br>
and name it, for example, <span class="grey-text">SickGear</span>. <em>You will need to allow <span class="highlight-text">Unknown Sources</span> if not already allowed</em></li>
|
||||
<li>in <b class="boldest">System/Add-ons</b>, "<span class="grey-text">Install from zip file</span>", in the folder list, select the <span class="grey-text">SickGear</span> source</li>
|
||||
<li>select the <span class="grey-text">repository.sickgear</span> in the folder listing, and install the repository zip<br>
|
||||
<em>Kodi will connect to the SickGear app to download and install its Add-on repository</em></li>
|
||||
</ul>
|
||||
<li>Install the SickGear Add-on from the repo</li>
|
||||
<ul>
|
||||
<li>in <b class="boldest">System/Add-ons</b>, "<span class="grey-text">Install from repository</span>", select "<span class="grey-text">SickGear Add-on repository</span>" / "<span class="grey-text">Services</span>"</li>
|
||||
<li>select Add-on "<span class="grey-text">SickGear Watched State Updater</span>"</li>
|
||||
<li>configure Add-on and restart Kodi after install or after switching profiles for the first time</li>
|
||||
</ul>
|
||||
</ol>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td><img height="16px" src="$sbRoot/images/notifiers/emby.png"><span class="vmid">Emby</span>
|
||||
<p>Episode watch states are periodically fetched and shown above.</p>
|
||||
</td>
|
||||
<td>
|
||||
<ol>
|
||||
<li>Enable Emby Media Server in <b class="boldest">config/Notifications</b></li>
|
||||
<li>Choose an interval for updating watched states</li>
|
||||
</ol>
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="$row_class()">
|
||||
<td><img height="16px" src="$sbRoot/images/notifiers/plex.png"><span class="vmid">Plex</span>
|
||||
<p>Episode watch states are periodically fetched and shown above.</p>
|
||||
</td>
|
||||
<td>
|
||||
<ol>
|
||||
<li>Enable Plex Media Server in <b class="boldest">config/Notifications</b></li>
|
||||
<li>Choose an interval for updating watched states</li>
|
||||
</ol>
|
||||
</td>
|
||||
</tr>
|
||||
##
|
||||
##
|
||||
#elif 'stats' in $layout
|
||||
##
|
||||
##
|
||||
#set sum = 0
|
||||
#for $hItem in $stat_results
|
||||
#set $sum += $hItem['count']
|
||||
#end for
|
||||
##
|
||||
#if 'graph' in $layout
|
||||
<tbody>
|
||||
<tr><td>
|
||||
#set $labels = []
|
||||
#set $perc = []
|
||||
#for $hItem in $stat_results
|
||||
#set $p = (float($hItem['count']) / float($sum)) * 100
|
||||
#if 1 <= $p:
|
||||
#set $labels += [$hItem['provider']]
|
||||
#set $perc += ['%s' % re.sub(r'(\d+)(\.\d)\d+', r'\1\2', str($p))]
|
||||
#end if
|
||||
#end for
|
||||
<script src="$sbRoot/js/plot.ly/plotly-latest.min.js?v=$sbPID"></script>
|
||||
<script src="$sbRoot/js/plot.ly/numeric/1.2.6/numeric.min.js?v=$sbPID"></script>
|
||||
|
||||
<div id="plot-canvas" style="margin:15px auto 15px;width:550px;height:350px"></div>
|
||||
<style>
|
||||
.modebar-btn[data-title*="edit plot"]{display:none !important}
|
||||
</style>
|
||||
<script>
|
||||
Plotly.newPlot('plot-canvas', [
|
||||
{
|
||||
values: [#echo ', '.join($perc)#],
|
||||
labels: [#echo '\'%s\'' % '\', \''.join($labels)#],
|
||||
name: 'SickGear provider activity',
|
||||
hoverinfo: 'label+percent+name',
|
||||
domain: {x: [0, .5]},
|
||||
hole: .42,
|
||||
type: 'pie'
|
||||
}
|
||||
],
|
||||
{title: 'SickGear provider activity (1% and above for #echo ('latest %s' % $limit, 'all')['0' == $limit]#)',
|
||||
annotations: [{font: {size: 14}, showarrow: false, text: 'Activity', x: 0.16, y: 0.5}]
|
||||
}, {displaylogo: !1, showLink: !1});
|
||||
</script>
|
||||
</td></tr>
|
||||
##
|
||||
##
|
||||
#else
|
||||
##
|
||||
##
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Provider</th>
|
||||
<th>Activity Hits</th>
|
||||
<th>Activity %</th>
|
||||
<th>Latest Activity</th>
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
<tfoot>
|
||||
<tr>
|
||||
<th class="text-nowrap" colspan="4"> </th>
|
||||
</tr>
|
||||
</tfoot>
|
||||
|
||||
<tbody>
|
||||
#for $hItem in $stat_results
|
||||
<tr>
|
||||
<td class="provider text-nowrap">
|
||||
#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
|
||||
#if None is not $provider
|
||||
<img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16"><span data-sort="$hItem['provider']">$provider.name</span>
|
||||
#else
|
||||
<img src="$sbRoot/images/providers/missing.png" width="16" height="16" title="missing provider"><span data-sort="$hItem['provider']">Missing Provider</span>
|
||||
#end if
|
||||
</td>
|
||||
<td>$hItem['count']</td>
|
||||
<td>#echo '%s%%' % re.sub(r'(\d+)(\.\d)\d+', r'\1\2', str((float($hItem['count'])/float($sum))*100))#</td>
|
||||
#set $curdatetime = $datetime.datetime.strptime(str($hItem['latest']), $history.dateFormat)
|
||||
<td><div class="${fuzzydate}" data-sort="$hItem['latest']">$sbdatetime.sbdatetime.sbfdatetime($curdatetime)</div></td>
|
||||
</tr>
|
||||
#end for
|
||||
#end if
|
||||
##
|
||||
##
|
||||
#elif 'failures' in $layout
|
||||
##
|
||||
##
|
||||
<div id="provider-failures">
|
||||
#if not $provider_fails
|
||||
<p>No current failures. Failure stats display here when appropriate.</p>
|
||||
#else
|
||||
<p>When a provider cannot be contacted over a period, SickGear backs off and waits an increasing interval between each retry</p>
|
||||
#for $prov in $provider_fail_stats
|
||||
#if $len($prov['fails'])
|
||||
|
||||
<!-- $prov['name'] -->
|
||||
<div>
|
||||
#set $prov_class = '<span %sstyle="vertical-align:middle">'
|
||||
#if not $prov['active']
|
||||
#set $prov_class = $prov_class % 'class="grey-text" '
|
||||
#else
|
||||
#set $prov_class = $prov_class % ''
|
||||
#end if
|
||||
<input type="button" class="shows-more btn" value="Expand" style="display:none"><input type="button" class="shows-less btn" value="Collapse"><img src="$sbRoot/images/providers/$prov['prov_img']" width="16" height="16" style="margin:0 6px 0 3px">$prov_class$prov['name']
|
||||
#if $prov['active']
|
||||
#if $prov['next_try']
|
||||
#set nt = $str($prov['next_try']).split('.', 2)[0][::-1].replace(':', ' m', 1).replace(':', ' h', 1)[::-1]
|
||||
... is paused until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in ${nt}s) <input type="button" class="provider-retry btn" id="$prov['prov_id']-btn-retry" value="Ignore pause on next search">
|
||||
#end if
|
||||
#else
|
||||
... is not enabled
|
||||
#end if
|
||||
</span>
|
||||
</div>
|
||||
<table class="manageTable provider-failures tablesorter hover-highlight focus-highlight text-center" cellspacing="0" border="0" cellpadding="0">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="text-center" style="width:13em;padding-right:20px">period of 1hr</th>
|
||||
<th class="text-center" style="padding-right:20px">server/timeout</th>
|
||||
<th class="text-center" style="padding-right:20px">network</th>
|
||||
<th class="text-center" style="padding-right:20px">no data</th>
|
||||
<th class="text-center" style="padding-right:20px">other</th>
|
||||
#if $prov['has_limit']
|
||||
<th class="text-center" style="padding-right:20px">hit limit</th>
|
||||
#end if
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
#set $day = []
|
||||
#for $fail in $prov['fails']
|
||||
#set $child = True
|
||||
#if $fail['date'] not in $day
|
||||
#set $day += [$fail['date']]
|
||||
#set $child = False
|
||||
#end if
|
||||
#slurp#
|
||||
<tr#if $fail['multirow'] and $child# class="tablesorter-childRow"#end if#>
|
||||
#if $fail['multirow']
|
||||
#if not $child
|
||||
<td><a href="#" class="provider-fail-parent-toggle" title="Totals (expand for detail)">$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</a></td>
|
||||
#else
|
||||
<td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
|
||||
#end if
|
||||
#else
|
||||
<td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
|
||||
#end if
|
||||
#set $blank = '-'
|
||||
#set $title=None
|
||||
#if $fail['http']['count']
|
||||
#set $title=$fail['http']['code']
|
||||
#end if
|
||||
<td>#if $fail['http']['count']#<span title="#if $child or not $fail['multirow']#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
|
||||
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
|
||||
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
|
||||
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
|
||||
#if $prov['has_limit']
|
||||
<td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
|
||||
#end if
|
||||
</tr>
|
||||
#end for
|
||||
</tbody>
|
||||
</table>
|
||||
<!-- /$prov['name'] -->
|
||||
#end if
|
||||
#end for
|
||||
#end if
|
||||
</div>
|
||||
##
|
||||
##
|
||||
#end if
|
||||
#if 'failure' not in $layout
|
||||
</tbody>
|
||||
</table>
|
||||
#end if
|
||||
|
||||
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_bottom.tmpl')
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
#import sickbeard
|
||||
#import datetime
|
||||
#from sickbeard import common
|
||||
#from sickbeard.common import (Overview, statusStrings, ARCHIVED, FAILED, IGNORED, SKIPPED,
|
||||
SNATCHED, SNATCHED_ANY, UNKNOWN, WANTED, DOWNLOADED)
|
||||
##
|
||||
#set global $title = 'Episode Overview'
|
||||
#set global $header = 'Episode Overview'
|
||||
|
@ -19,7 +20,7 @@
|
|||
#if not $whichStatus or ($whichStatus and not $ep_counts)
|
||||
##
|
||||
#if $whichStatus:
|
||||
<h3>no episodes have status <span class="grey-text">$common.statusStrings[$whichStatus].lower()</span></h3>
|
||||
<h3>no episodes have status <span class="grey-text">$statusStrings[$whichStatus].lower()</span></h3>
|
||||
#end if
|
||||
|
||||
<form action="$sbRoot/manage/episodeStatuses" method="get">
|
||||
|
@ -27,8 +28,8 @@
|
|||
Manage episodes with status
|
||||
<select name="whichStatus" class="form-control form-control-inline input-sm" style="margin:0 10px">
|
||||
|
||||
#for $curStatus in [$common.SKIPPED, $common.UNKNOWN, $common.SNATCHED, $common.WANTED, $common.ARCHIVED, $common.IGNORED, $common.DOWNLOADED]:
|
||||
<option value="$curStatus"#echo ('', ' selected="selected"')[$curStatus == $default_manage]#>$common.statusStrings[$curStatus]</option>
|
||||
#for $curStatus in [$SKIPPED, $UNKNOWN, $SNATCHED, $WANTED, $ARCHIVED, $IGNORED, $DOWNLOADED]:
|
||||
<option value="$curStatus"#echo ('', ' selected="selected"')[$curStatus == $default_manage]#>$statusStrings[$curStatus]</option>
|
||||
#end for
|
||||
|
||||
</select>
|
||||
|
@ -36,26 +37,26 @@
|
|||
</form>
|
||||
##
|
||||
#else
|
||||
#if $whichStatus in ($common.ARCHIVED, $common.IGNORED):
|
||||
#if $whichStatus in ($ARCHIVED, $IGNORED):
|
||||
#set $row_class = 'good'
|
||||
#elif $whichStatus == $common.SNATCHED:
|
||||
#elif $whichStatus == $SNATCHED:
|
||||
#set $row_class = 'snatched'
|
||||
#else
|
||||
#set $row_class = $common.Overview.overviewStrings[$whichStatus]
|
||||
#set $row_class = $Overview.overviewStrings[$whichStatus]
|
||||
#end if
|
||||
|
||||
#set $statusList = [$common.SKIPPED, $common.ARCHIVED, $common.IGNORED]
|
||||
#if $common.DOWNLOADED == $whichStatus:
|
||||
#set $statusList = [$common.ARCHIVED]
|
||||
#elif $common.ARCHIVED == $whichStatus:
|
||||
#set $statusList = [$common.SKIPPED, $common.DOWNLOADED, $common.ARCHIVED, $common.IGNORED]
|
||||
#set $statusList = [$ARCHIVED, $IGNORED, $SKIPPED]
|
||||
#if $DOWNLOADED == $whichStatus:
|
||||
#set $statusList = [$ARCHIVED]
|
||||
#elif $ARCHIVED == $whichStatus:
|
||||
#set $statusList = [$SKIPPED, $DOWNLOADED, $ARCHIVED, $IGNORED]
|
||||
#end if
|
||||
#if $whichStatus in $statusList
|
||||
$statusList.remove($whichStatus)
|
||||
#end if
|
||||
|
||||
#if $whichStatus in $common.SNATCHED_ANY
|
||||
$statusList.append($common.FAILED)
|
||||
#if $whichStatus in $SNATCHED_ANY
|
||||
$statusList.append($FAILED)
|
||||
#end if
|
||||
|
||||
<script type="text/javascript" src="$sbRoot/js/manageEpisodeStatuses.js?v=$sbPID"></script>
|
||||
|
@ -63,7 +64,7 @@
|
|||
<form action="$sbRoot/manage/changeEpisodeStatuses" method="post">
|
||||
<input type="hidden" id="oldStatus" name="oldStatus" value="$whichStatus">
|
||||
|
||||
<h3><span class="grey-text">$ep_count</span> episode#echo ('s', '')[1 == $ep_count]# marked <span class="grey-text">$common.statusStrings[$whichStatus].lower()</span> in <span class="grey-text">${len($sorted_show_ids)}</span> show#echo ('s', '')[1 == len($sorted_show_ids)]#</h3>
|
||||
<h3><span class="grey-text">$ep_count</span> episode#echo ('s', '')[1 == $ep_count]# marked <span class="grey-text">$statusStrings[$whichStatus].lower()</span> in <span class="grey-text">${len($sorted_show_ids)}</span> show#echo ('s', '')[1 == len($sorted_show_ids)]#</h3>
|
||||
|
||||
<input type="hidden" id="row_class" value="$row_class">
|
||||
|
||||
|
@ -71,16 +72,16 @@
|
|||
<span>Set checked shows/episodes to</span>
|
||||
<select name="newStatus" class="form-control form-control-inline input-sm" style="margin:0 10px 0 5px">
|
||||
#for $curStatus in $statusList:
|
||||
<option value="$curStatus">$common.statusStrings[$curStatus]</option>
|
||||
<option value="$curStatus">$statusStrings[$curStatus]</option>
|
||||
#end for
|
||||
</select>
|
||||
<input class="btn btn-inline go" type="submit" value="Go">
|
||||
|
||||
#if $common.DOWNLOADED != $whichStatus:
|
||||
#if $DOWNLOADED != $whichStatus:
|
||||
<span class="red-text" style="margin:0 0 0 30px">Override checked status to</span>
|
||||
<select name="wantedStatus" class="form-control form-control-inline input-sm" style="margin:0 10px 0 5px">
|
||||
<option value="$common.UNKNOWN">nothing</option>
|
||||
<option value="$common.WANTED">$common.statusStrings[$common.WANTED]</option>
|
||||
<option value="$UNKNOWN">nothing</option>
|
||||
<option value="$WANTED">$statusStrings[$WANTED]</option>
|
||||
</select>
|
||||
<input class="btn btn-inline go" type="submit" value="Go">
|
||||
#end if
|
||||
|
|
|
@ -63,83 +63,6 @@
|
|||
|
||||
|
||||
|
||||
<div id="provider-failures" class="section">
|
||||
<h3>Provider Failures:</h3>
|
||||
#if not $provider_fails
|
||||
<p>No current failures. Failure stats display here when appropriate.</p>
|
||||
#else
|
||||
<p>Some providers can be often down over periods, SickGear will back off then retry connecting at a later time</p>
|
||||
#for $prov in $provider_fail_stats
|
||||
#if $len($prov['fails'])
|
||||
|
||||
<!-- $prov['name'] -->
|
||||
<div>
|
||||
<input type="button" class="shows-more btn" value="Expand" style="display:none"><input type="button" class="shows-less btn" value="Collapse"><img src="$sbRoot/images/providers/$prov['prov_img']" width="16" height="16" style="margin:0 6px 0 3px">$prov['name']
|
||||
#if $prov['active']
|
||||
#if $prov['next_try']
|
||||
#set nt = $str($prov['next_try']).split('.', 2)
|
||||
... is blocked until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in $nt[0]) <input type="button" class="provider-retry btn" id="$prov['prov_id']-btn-retry" value="Ignore block on next search">
|
||||
#end if
|
||||
#else
|
||||
... is not enabled
|
||||
#end if
|
||||
</div>
|
||||
<table class="manageTable provider-failures tablesorter hover-highlight focus-highlight text-center" cellspacing="0" border="0" cellpadding="0">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="text-center" style="width:13em;padding-right:20px">period of 1hr</th>
|
||||
<th class="text-center" style="padding-right:20px">server/timeout</th>
|
||||
<th class="text-center" style="padding-right:20px">network</th>
|
||||
<th class="text-center" style="padding-right:20px">no data</th>
|
||||
<th class="text-center" style="padding-right:20px">other</th>
|
||||
#if $prov['has_limit']
|
||||
<th class="text-center" style="padding-right:20px">hit limit</th>
|
||||
#end if
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
#set $day = []
|
||||
#for $fail in $prov['fails']
|
||||
#set $child = True
|
||||
#if $fail['date'] not in $day
|
||||
#set $day += [$fail['date']]
|
||||
#set $child = False
|
||||
#end if
|
||||
#slurp#
|
||||
<tr#if $fail['multirow'] and $child# class="tablesorter-childRow"#end if#>
|
||||
#if $fail['multirow']
|
||||
#if not $child
|
||||
<td><a href="#" class="provider-fail-parent-toggle" title="Totals (expand for detail)">$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</a></td>
|
||||
#else
|
||||
<td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
|
||||
#end if
|
||||
#else
|
||||
<td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
|
||||
#end if
|
||||
#set $blank = '-'
|
||||
#set $title=None
|
||||
#if $fail['http']['count']
|
||||
#set $title=$fail['http']['code']
|
||||
#end if
|
||||
<td>#if $fail['http']['count']#<span title="#if $child or not $fail['multirow']#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
|
||||
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
|
||||
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
|
||||
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
|
||||
#if $prov['has_limit']
|
||||
<td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
|
||||
#end if
|
||||
</tr>
|
||||
#end for
|
||||
</tbody>
|
||||
</table>
|
||||
<!-- /$prov['name'] -->
|
||||
#end if
|
||||
#end for
|
||||
#end if
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
<div id="search-queues" class="section">
|
||||
<h3>Search Queues:</h3>
|
||||
|
||||
|
|
gui/slick/interfaces/default/repo_index.tmpl (new file, 19 lines)
|
@ -0,0 +1,19 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head></head>
|
||||
<body>
|
||||
<h1>Index of $basepath</h1>
|
||||
<table border="1" cellpadding="5" cellspacing="0" class="whitelinks">
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
</tr>
|
||||
#for $file in $filelist
|
||||
<tr>
|
||||
<td><a href="$file">$file</a></td>
|
||||
</tr>
|
||||
#end for
|
||||
</table>
|
||||
<hr>
|
||||
<em>Tornado Server for SickGear</em>
|
||||
</body>
|
||||
</html>
|
gui/slick/interfaces/default/repo_kodi_addon.tmpl (new file, 27 lines)
|
@ -0,0 +1,27 @@
|
|||
##
|
||||
#from sickbeard import WEB_PORT, WEB_ROOT, ENABLE_HTTPS
|
||||
#set sg_host = $getVar('sbHost', 'localhost')
|
||||
#set sg_port = str($getVar('sbHttpPort', WEB_PORT))
|
||||
#set sg_root = $getVar('sbRoot', WEB_ROOT)
|
||||
#set sg_use_https = $getVar('sbHttpsEnabled', ENABLE_HTTPS)
|
||||
##
|
||||
#set $base_url = 'http%s://%s:%s%s' % (('', 's')[any([sg_use_https])], $sg_host, $sg_port, $sg_root)
|
||||
##
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<addon id="repository.sickgear" name="SickGear Add-on repository" version="1.0.0" provider-name="SickGear">
|
||||
<extension point="xbmc.addon.repository"
|
||||
name="SickGear Add-on Repository">
|
||||
<info compressed="true">$base_url/kodi/addons.xml</info>
|
||||
<checksum>$base_url/kodi/addons.xml.md5</checksum>
|
||||
<datadir zip="true">$base_url/kodi</datadir>
|
||||
<hashes>false</hashes>
|
||||
</extension>
|
||||
<extension point="xbmc.addon.metadata">
|
||||
<summary>Install Add-ons for SickGear</summary>
|
||||
<description>Download and install add-ons from a repository at a running SickGear instance.[CR][CR]Contains:[CR]* Watchedstate updater service</description>
|
||||
<disclaimer></disclaimer>
|
||||
<platform>all</platform>
|
||||
<website>https://github.com/SickGear/SickGear</website>
|
||||
<nofanart>true</nofanart>
|
||||
</extension>
|
||||
</addon>
|
gui/slick/interfaces/default/repo_kodi_addons.tmpl (new file, 5 lines)
|
@ -0,0 +1,5 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<addons>
|
||||
$watchedstate_updater_addon_xml
|
||||
$repo_xml
|
||||
</addons>
|
|
@ -68,12 +68,12 @@ $(document).ready(function () {
|
|||
var showname = document.getElementById('showtitle').getAttribute('data-showname');
|
||||
$.confirm({
|
||||
'title' : 'Remove Show',
|
||||
'message' : 'Are you sure you want to remove <span class="footerhighlight">' + showname + '</span> from the database ?<br /><br /><input type="checkbox" id="deleteFiles"> <span class="red-text">Check to delete files as well. IRREVERSIBLE</span></input>',
|
||||
'message' : 'Are you sure you want to remove <span class="footerhighlight">' + showname + '</span> from the database ?<br /><br /><input type="checkbox" id="delete-files"> <span class="red-text">Check to delete files as well. IRREVERSIBLE</span>',
|
||||
'buttons' : {
|
||||
'Yes' : {
|
||||
'class' : 'green',
|
||||
'action': function(){
|
||||
location.href = target + (document.getElementById('deleteFiles').checked ? '&full=1' : '');
|
||||
location.href = target + (document.getElementById('delete-files').checked ? '&full=1' : '');
|
||||
// If checkbox is ticked, remove show and delete files. Else just remove show.
|
||||
}
|
||||
},
|
||||
|
@ -85,6 +85,98 @@ $(document).ready(function () {
|
|||
});
|
||||
});
|
||||
|
||||
$('#del-watched').bind('click', function(e) {
|
||||
e.preventDefault();
|
||||
|
||||
var dedupe = [], delArr = [], mFiles = 0;
|
||||
$('.del-check').each(function() {
|
||||
if (!0 === this.checked) {
|
||||
var pathFile = $(this).closest('tr').attr('data-file'),
|
||||
thisId = $(this).attr('id');
|
||||
|
||||
if (-1 === jQuery.inArray(pathFile, dedupe)) {
|
||||
dedupe.push(pathFile);
|
||||
mFiles += 1 - $(this).closest('tr').find('.tvShow .strike-deleted').length;
|
||||
}
|
||||
|
||||
delArr.push(thisId.replace('del-', ''));
|
||||
|
||||
/** @namespace $.SickGear.history.isCompact */
|
||||
if ($.SickGear.history.isCompact) {
|
||||
// then select all related episode checkboxes
|
||||
var tvepId = $(this).closest('tr').attr('data-tvep-id');
|
||||
$('tr[data-tvep-id="' + tvepId + '"] input.del-check:not("#' + thisId + '")')
|
||||
.each(function(){
|
||||
delArr.push($(this).attr('id').replace('del-', ''));
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
if (0 === delArr.length)
|
||||
return !1;
|
||||
|
||||
/** @namespace $.SickGear.history.isTrashit */
|
||||
/** @namespace $.SickGear.history.lastDeleteFiles */
|
||||
/** @namespace $.SickGear.history.lastDeleteRecords */
|
||||
var action = $.SickGear.history.isTrashit ? 'Trash' : 'Delete',
|
||||
btns = {
|
||||
'Yes' : {
|
||||
'class' : 'green',
|
||||
'action': function(){
|
||||
var deleteFiles = !!$('#delete-files:checked').length,
|
||||
deleteRecords = !!$('#delete-records:checked').length,
|
||||
checked = ' checked="checked"';
|
||||
$.SickGear.history.lastDeleteFiles = deleteFiles ? checked : '';
|
||||
$.SickGear.history.lastDeleteRecords = deleteRecords ? checked : '';
|
||||
$.post($.SickGear.Root + '/history/watched',
|
||||
{
|
||||
tvew_id: delArr.join('|'),
|
||||
files: (deleteFiles ? '1' : ''),
|
||||
records: (deleteRecords ? '1' : '')
|
||||
},
|
||||
function(data){
|
||||
var result = $.parseJSON(data);
|
||||
result.success && window.location.reload(true);
|
||||
/* using window.location as the following is
|
||||
sluggish when deleting 20 of 100 records
|
||||
*/
|
||||
/*
|
||||
result.success && $.each(result.success, function(){
|
||||
var tr = $('#del-' + this).closest('tr');
|
||||
var t = tr.closest('table');
|
||||
tr.addClass('delete-me').fadeToggle('fast', 'linear').promise().done(
|
||||
function(){
|
||||
$('.delete-me').html('');
|
||||
t.trigger('update');
|
||||
$.SickGear.sumChecked();
|
||||
});
|
||||
});*/
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// btn pre-created here in order to use a custom btn text as named key to object
|
||||
btns['No' + (0 < mFiles ? ', abort ' + ($.SickGear.history.isTrashit ? 'trash' : 'permanent delete') : '')] = {'class' : 'red'};
|
||||
$.confirm({
|
||||
'title' : (action + (0 < mFiles ? ' media' : ' records')
|
||||
+ '<span style="float:right;font-size:12px">(<a class="highlight-text contrast-text" href="/config/general/">"Send to trash" options</a>)</span>'),
|
||||
'message' : (0 < mFiles
|
||||
? '<input id="delete-files" style="margin-right:6px"' + $.SickGear.history.lastDeleteFiles + ' type="checkbox">'
|
||||
+ '<span>' + action + ' <span class="footerhighlight">' + mFiles + '</span>'
|
||||
+ ' media file' + (1===mFiles?'':'s') + ' from disk</span>'
|
||||
: ''
|
||||
)
|
||||
+ '<span style="display:block;margin-top:20px">'
|
||||
+ '<input id="delete-records" style="margin-right:6px"' + $.SickGear.history.lastDeleteRecords + ' type="checkbox">'
|
||||
+ 'Remove <span class="footerhighlight">'
|
||||
+ delArr.length + '</span> history record' + (1===delArr.length?'':'s')
|
||||
+ '</span>'
|
||||
+ '<span class="red-text" style="display:block;margin-top:20px">Are you sure ?</span>',
|
||||
'buttons' : btns
|
||||
});
|
||||
});
|
||||
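For readers who want to drive the same endpoint outside the browser, here is a hedged sketch of the request the '#del-watched' handler above posts. The base URL and ids are placeholders; the parameter names (tvew_id, files, records) are taken directly from the $.post call in the handler, so this mirrors observed behaviour rather than documenting a formal API.

```python
# Minimal sketch, assuming a locally reachable SickGear instance.
import requests

SICKGEAR = 'http://localhost:8081'          # assumed base URL
payload = {
    'tvew_id': '101|102|103',               # pipe-joined ids taken from the del-<id> checkboxes
    'files': '1',                           # '1' = also delete/trash the media files, '' = keep them
    'records': '1',                         # '1' = also remove the history records, '' = keep them
}
resp = requests.post(SICKGEAR + '/history/watched', data=payload)
print(resp.json().get('success'))           # the page handler reloads the view when success is returned
```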
|
||||
$('a.clearhistory').bind('click',function(e) {
|
||||
e.preventDefault();
|
||||
var target = $( this ).attr('href');
|
||||
|
|
236
gui/slick/js/history.js
Normal file
|
@ -0,0 +1,236 @@
|
|||
/** @namespace $.SickGear.Root */
|
||||
/** @namespace $.SickGear.history.isCompact */
|
||||
/** @namespace $.SickGear.history.isTrashit */
|
||||
/** @namespace $.SickGear.history.useSubtitles */
|
||||
/** @namespace $.SickGear.history.layoutName */
|
||||
/*
|
||||
2017 Jason Mulligan <jason.mulligan@avoidwork.com>
|
||||
@version 3.5.11
|
||||
*/
|
||||
!function(i){function e(i){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=[],d=0,r=void 0,a=void 0,s=void 0,f=void 0,u=void 0,l=void 0,v=void 0,B=void 0,c=void 0,p=void 0,y=void 0,m=void 0,x=void 0,g=void 0;if(isNaN(i))throw new Error("Invalid arguments");return s=!0===e.bits,y=!0===e.unix,a=e.base||2,p=void 0!==e.round?e.round:y?1:2,m=void 0!==e.spacer?e.spacer:y?"":" ",g=e.symbols||e.suffixes||{},x=2===a?e.standard||"jedec":"jedec",c=e.output||"string",u=!0===e.fullform,l=e.fullforms instanceof Array?e.fullforms:[],r=void 0!==e.exponent?e.exponent:-1,B=Number(i),v=B<0,f=a>2?1e3:1024,v&&(B=-B),(-1===r||isNaN(r))&&(r=Math.floor(Math.log(B)/Math.log(f)))<0&&(r=0),r>8&&(r=8),0===B?(n[0]=0,n[1]=y?"":t[x][s?"bits":"bytes"][r]):(d=B/(2===a?Math.pow(2,10*r):Math.pow(1e3,r)),s&&(d*=8)>=f&&r<8&&(d/=f,r++),n[0]=Number(d.toFixed(r>0?p:0)),n[1]=10===a&&1===r?s?"kb":"kB":t[x][s?"bits":"bytes"][r],y&&(n[1]="jedec"===x?n[1].charAt(0):r>0?n[1].replace(/B$/,""):n[1],o.test(n[1])&&(n[0]=Math.floor(n[0]),n[1]=""))),v&&(n[0]=-n[0]),n[1]=g[n[1]]||n[1],"array"===c?n:"exponent"===c?r:"object"===c?{value:n[0],suffix:n[1],symbol:n[1]}:(u&&(n[1]=l[r]?l[r]:b[x][r]+(s?"bit":"byte")+(1===n[0]?"":"s")),n.join(m))}var o=/^(b|B)$/,t={iec:{bits:["b","Kib","Mib","Gib","Tib","Pib","Eib","Zib","Yib"],bytes:["B","KiB","MiB","GiB","TiB","PiB","EiB","ZiB","YiB"]},jedec:{bits:["b","Kb","Mb","Gb","Tb","Pb","Eb","Zb","Yb"],bytes:["B","KB","MB","GB","TB","PB","EB","ZB","YB"]}},b={iec:["","kibi","mebi","gibi","tebi","pebi","exbi","zebi","yobi"],jedec:["","kilo","mega","giga","tera","peta","exa","zetta","yotta"]};e.partial=function(i){return function(o){return e(o,i)}},"undefined"!=typeof exports?module.exports=e:"function"==typeof define&&define.amd?define(function(){return e}):i.filesize=e}("undefined"!=typeof window?window:global);
|
||||
|
||||
function rowCount(){
|
||||
var output$ = $('#row-count');
|
||||
if(!output$.length)
|
||||
return;
|
||||
|
||||
var tbody$ = $('#tbody'),
|
||||
nRows = tbody$.find('tr').length,
|
||||
compacted = tbody$.find('tr.hide').length,
|
||||
compactedFiltered = tbody$.find('tr.filtered.hide').length,
|
||||
filtered = tbody$.find('tr.filtered').length;
|
||||
output$.text((filtered
|
||||
? nRows - (filtered + compacted - compactedFiltered) + ' / ' + nRows + ' filtered'
|
||||
: nRows) + (1 === nRows ? ' row' : ' rows'));
|
||||
}
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
var extraction = {0: function(node) {
|
||||
var dataSort = $(node).find('div[data-sort]').attr('data-sort')
|
||||
|| $(node).find('span[data-sort]').attr('data-sort');
|
||||
return !dataSort ? dataSort : dataSort.toLowerCase();}},
|
||||
tbody$ = $('#tbody'),
|
||||
headers = {},
|
||||
layoutName = '' + $.SickGear.history.layoutName;
|
||||
|
||||
if ('detailed' === layoutName) {
|
||||
|
||||
jQuery.extend(extraction, {
|
||||
4: function (node) {
|
||||
return $(node).find('span').text().toLowerCase();
|
||||
}
|
||||
});
|
||||
|
||||
jQuery.extend(headers, {4: {sorter: 'quality'}});
|
||||
|
||||
} else if ('compact' === layoutName) {
|
||||
|
||||
jQuery.extend(extraction, {
|
||||
1: function (node) {
|
||||
return $(node).find('span[data-sort]').attr('data-sort').toLowerCase();
|
||||
},
|
||||
2: function (node) {
|
||||
return $(node).attr('provider').toLowerCase();
|
||||
},
|
||||
5: function (node) {
|
||||
return $(node).attr('quality').toLowerCase();
|
||||
}
|
||||
});
|
||||
|
||||
var disable = {sorter: !1}, qualSort = {sorter: 'quality'};
|
||||
jQuery.extend(headers, $.SickGear.history.useSubtitles ? {4: disable, 5: qualSort} : {3: disable, 4: qualSort});
|
||||
|
||||
} else if (-1 !== layoutName.indexOf('watched')) {
|
||||
|
||||
jQuery.extend(extraction, {
|
||||
3: function(node) {
|
||||
return $(node).find('span[data-sort]').attr('data-sort');
|
||||
},
|
||||
5: function(node) {
|
||||
return $(node).find('span[data-sort]').attr('data-sort');
|
||||
},
|
||||
6: function (node) {
|
||||
return $(node).find('input:checked').length;
|
||||
}
|
||||
});
|
||||
|
||||
jQuery.extend(headers, {4: {sorter: 'quality'}});
|
||||
|
||||
rowCount();
|
||||
} else if (-1 !== layoutName.indexOf('compact_stats')) {
|
||||
jQuery.extend(extraction, {
|
||||
3: function (node) {
|
||||
return $(node).find('div[data-sort]').attr('data-sort');
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
var isWatched = -1 !== $('select[name="HistoryLayout"]').val().indexOf('watched'),
|
||||
options = {
|
||||
widgets: ['zebra', 'filter'],
|
||||
widgetOptions : {
|
||||
filter_hideEmpty: !0, filter_matchType : {'input': 'match', 'select': 'match'},
|
||||
filter_resetOnEsc: !0, filter_saveFilters: !0, filter_searchDelay: 300
|
||||
},
|
||||
sortList: isWatched ? [[1, 1], [0, 1]] : [0, 1],
|
||||
textExtraction: extraction,
|
||||
headers: headers},
|
||||
stateLayoutDate = function(table$, glyph$){table$.toggleClass('event-age');glyph$.toggleClass('age date');};
|
||||
|
||||
if(isWatched){
|
||||
jQuery.extend(options, {
|
||||
selectorSort: '.tablesorter-header-inside',
|
||||
headerTemplate: '<div class="tablesorter-header-inside" style="margin:0 -8px 0 -4px">{content}{icon}</div>',
|
||||
onRenderTemplate: function(index, template){
|
||||
if(0 === index){
|
||||
template = '<i id="watched-date" class="icon-glyph date add-qtip" title="Change date layout" style="float:left;margin:4px -14px 0 2px"></i>'
|
||||
+ template;
|
||||
}
|
||||
return template;
|
||||
},
|
||||
onRenderHeader: function(){
|
||||
var table$ = $('#history-table'), glyph$ = $('#watched-date');
|
||||
if($.tablesorter.storage(table$, 'isLayoutAge')){
|
||||
stateLayoutDate(table$, glyph$);
|
||||
}
|
||||
$(this).find('#watched-date').on('click', function(){
|
||||
stateLayoutDate(table$, glyph$);
|
||||
$.tablesorter.storage(table$, 'isLayoutAge', table$.hasClass('event-age'));
|
||||
return !1;
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
$('#history-table').tablesorter(options).bind('filterEnd', function(){
|
||||
rowCount();
|
||||
});
|
||||
|
||||
$('#limit').change(function(){
|
||||
window.location.href = $.SickGear.Root + '/history/?limit=' + $(this).val()
|
||||
});
|
||||
|
||||
$('#show-watched-help').click(function () {
|
||||
$('#watched-help').fadeToggle('fast', 'linear');
|
||||
$.get($.SickGear.Root + '/history/toggle_help');
|
||||
});
|
||||
|
||||
var addQTip = (function(){
|
||||
$(this).css('cursor', 'help');
|
||||
$(this).qtip({
|
||||
show: {solo:true},
|
||||
position: {viewport:$(window), my:'left center', adjust:{y: -10, x: 2}},
|
||||
style: {tip: {corner:true, method:'polygon'}, classes:'qtip-dark qtip-rounded qtip-shadow'}
|
||||
});
|
||||
});
|
||||
$('.add-qtip').each(addQTip);
|
||||
|
||||
$.SickGear.sumChecked = (function(){
|
||||
var dedupe = [], sum = 0, output;
|
||||
|
||||
$('.del-check:checked').each(function(){
|
||||
if ($(this).closest('tr').find('.tvShow .strike-deleted').length)
|
||||
return;
|
||||
var pathFile = $(this).closest('tr').attr('data-file');
|
||||
if (-1 === jQuery.inArray(pathFile, dedupe)) {
|
||||
dedupe.push(pathFile);
|
||||
output = $(this).closest('td').prev('td.size').find('span[data-sort]').attr('data-sort');
|
||||
sum = sum + parseInt(output, 10);
|
||||
}
|
||||
});
|
||||
$('#del-watched').attr('disabled', !dedupe.length && !$('#tbody').find('tr').find('.tvShow .strike-deleted').length);
|
||||
|
||||
output = filesize(sum, {symbols: {B: 'Bytes'}});
|
||||
$('#sum-size').text(/\s(MB)$/.test(output) ? filesize(sum, {round:1})
|
||||
: /^1\sB/.test(output) ? output.replace('Bytes', 'Byte') : output);
|
||||
});
|
||||
$.SickGear.sumChecked();
|
||||
|
||||
var className='.del-check', lastCheck = null, check, found;
|
||||
tbody$.on('click', className, function(ev){
|
||||
if(!lastCheck || !ev.shiftKey){
|
||||
lastCheck = this;
|
||||
} else {
|
||||
check = this; found = 0;
|
||||
$('#tbody').find('> tr:visible').find(className).each(function(){
|
||||
if (2 === found)
|
||||
return !1;
|
||||
if (1 === found)
|
||||
this.checked = lastCheck.checked;
|
||||
found += (1 && (this === check || this === lastCheck));
|
||||
});
|
||||
}
|
||||
$(this).closest('table').trigger('update');
|
||||
$.SickGear.sumChecked();
|
||||
});
|
||||
|
||||
$('.shows-less').click(function(){
|
||||
var table$ = $(this).nextAll('table:first');
|
||||
table$ = table$.length ? table$ : $(this).parent().nextAll('table:first');
|
||||
table$.hide();
|
||||
$(this).hide();
|
||||
$(this).prevAll('input:first').show();
|
||||
});
|
||||
$('.shows-more').click(function(){
|
||||
var table$ = $(this).nextAll('table:first');
|
||||
table$ = table$.length ? table$ : $(this).parent().nextAll('table:first');
|
||||
table$.show();
|
||||
$(this).hide();
|
||||
$(this).nextAll('input:first').show();
|
||||
});
|
||||
|
||||
$('.provider-retry').click(function () {
|
||||
$(this).addClass('disabled');
|
||||
var match = $(this).attr('id').match(/^(.+)-btn-retry$/);
|
||||
$.ajax({
|
||||
url: $.SickGear.Root + '/manage/manageSearches/retryProvider?provider=' + match[1],
|
||||
type: 'GET',
|
||||
complete: function () {
|
||||
window.location.reload(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
$('.provider-failures').tablesorter({widgets : ['zebra'],
|
||||
headers : { 0:{sorter:!1}, 1:{sorter:!1}, 2:{sorter:!1}, 3:{sorter:!1}, 4:{sorter:!1}, 5:{sorter:!1} }
|
||||
});
|
||||
|
||||
$('.provider-fail-parent-toggle').click(function(){
|
||||
$(this).closest('tr').nextUntil('tr:not(.tablesorter-childRow)').find('td').toggle();
|
||||
return !1;
|
||||
});
|
||||
|
||||
// Make table cell focusable
|
||||
// http://css-tricks.com/simple-css-row-column-highlighting/
|
||||
var focus$ = $('.focus-highlight');
|
||||
if (focus$.length){
|
||||
focus$.find('td, th')
|
||||
.attr('tabindex', '1')
|
||||
// add touch device support
|
||||
.on('touchstart', function(){
|
||||
$(this).focus();
|
||||
});
|
||||
}
|
||||
});
|
|
@ -34,37 +34,4 @@ $(function(){
|
|||
$(this).hide();
|
||||
$(this).nextAll('input:first').show();
|
||||
});
|
||||
$('.provider-retry').click(function () {
|
||||
$(this).addClass('disabled');
|
||||
var match = $(this).attr('id').match(/^(.+)-btn-retry$/);
|
||||
$.ajax({
|
||||
url: sbRoot + '/manage/manageSearches/retryProvider?provider=' + match[1],
|
||||
type: 'GET',
|
||||
complete: function () {
|
||||
window.location.reload(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
$('.provider-failures').tablesorter({widgets : ['zebra'],
|
||||
headers : { 0:{sorter:!1}, 1:{sorter:!1}, 2:{sorter:!1}, 3:{sorter:!1}, 4:{sorter:!1}, 5:{sorter:!1} }
|
||||
});
|
||||
|
||||
$('.provider-fail-parent-toggle').click(function(){
|
||||
$(this).closest('tr').nextUntil('tr:not(.tablesorter-childRow)').find('td').toggle();
|
||||
return !1;
|
||||
});
|
||||
|
||||
// Make table cell focusable
|
||||
// http://css-tricks.com/simple-css-row-column-highlighting/
|
||||
var focus$ = $('.focus-highlight');
|
||||
if (focus$.length){
|
||||
focus$.find('td, th')
|
||||
.attr('tabindex', '1')
|
||||
// add touch device support
|
||||
.on('touchstart', function(){
|
||||
$(this).focus();
|
||||
});
|
||||
}
|
||||
|
||||
});
|
||||
|
|
3
gui/slick/js/plot.ly/numeric/1.2.6/numeric.min.js
vendored
Normal file
7
gui/slick/js/plot.ly/plotly-latest.min.js
vendored
Normal file
1
lib/plex/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
from plex import *
|
423
lib/plex/plex.py
Normal file
|
@ -0,0 +1,423 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# This file is part of SickGear.
|
||||
#
|
||||
# SickGear is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# SickGear is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from time import sleep
|
||||
|
||||
import datetime
|
||||
import math
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
|
||||
try:
|
||||
from urllib import urlencode # Python2
|
||||
except ImportError:
|
||||
import urllib
|
||||
from urllib.parse import urlencode # Python3
|
||||
|
||||
try:
|
||||
import urllib.request as urllib2
|
||||
except ImportError:
|
||||
import urllib2
|
||||
|
||||
from sickbeard import logger
|
||||
from sickbeard.helpers import getURL, tryInt
|
||||
|
||||
try:
|
||||
from lxml import etree
|
||||
except ImportError:
|
||||
try:
|
||||
import xml.etree.cElementTree as etree
|
||||
except ImportError:
|
||||
import xml.etree.ElementTree as etree
|
||||
|
||||
|
||||
class Plex:
|
||||
def __init__(self, settings=None):
|
||||
|
||||
settings = settings or {}
|
||||
self._plex_host = settings.get('plex_host') or '127.0.0.1'
|
||||
self.plex_port = settings.get('plex_port') or '32400'
|
||||
|
||||
self.username = settings.get('username', '')
|
||||
self.password = settings.get('password', '')
|
||||
self.token = settings.get('token', '')
|
||||
|
||||
self.device_name = settings.get('device_name', '')
|
||||
self.client_id = settings.get('client_id') or '5369636B47656172'
|
||||
self.machine_client_identifier = ''
|
||||
|
||||
self.default_home_users = settings.get('default_home_users', '')
|
||||
|
||||
# Progress percentage to consider video as watched
|
||||
# if set to anything > 0, videos with watch progress greater than this will be considered watched
|
||||
self.default_progress_as_watched = settings.get('default_progress_as_watched', 0)
|
||||
|
||||
# Sections to scan. If empty all sections will be looked at,
|
||||
# the section id should be used, which is the number found in the URL on PlexWeb after /section/[ID]
|
||||
self.section_list = settings.get('section_list', [])
|
||||
|
||||
# Sections to skip scanning, for use when Settings['section_list'] is not specified,
|
||||
# the same as section_list, the section id should be used
|
||||
self.ignore_sections = settings.get('ignore_sections', [])
|
||||
|
||||
# Filter sections by paths that are in this array
|
||||
self.section_filter_path = settings.get('section_filter_path', [])
|
||||
|
||||
# Results
|
||||
self.show_states = {}
|
||||
self.file_count = 0
|
||||
|
||||
# Conf
|
||||
self.config_version = 2.0
|
||||
self.use_logger = False
|
||||
self.test = None
|
||||
self.home_user_tokens = {}
|
||||
|
||||
if self.username and '' == self.token:
|
||||
self.token = self.get_token(self.username, self.password)
|
||||
|
||||
@property
|
||||
def plex_host(self):
|
||||
|
||||
if not self._plex_host.startswith('http'):
|
||||
return 'http://%s' % self._plex_host
|
||||
return self._plex_host
|
||||
|
||||
@plex_host.setter
|
||||
def plex_host(self, value):
|
||||
|
||||
self._plex_host = value
|
||||
|
||||
def log(self, msg, debug=True):
|
||||
|
||||
try:
|
||||
if self.use_logger:
|
||||
msg = 'Plex:: ' + msg
|
||||
if debug:
|
||||
logger.log(msg, logger.DEBUG)
|
||||
else:
|
||||
logger.log(msg)
|
||||
# else:
|
||||
# print(msg.encode('ascii', 'replace').decode())
|
||||
except (StandardError, Exception):
|
||||
pass
|
||||
|
||||
def get_token(self, user, passw):
|
||||
|
||||
auth = ''
|
||||
try:
|
||||
auth = getURL('https://plex.tv/users/sign_in.json',
|
||||
headers={'X-Plex-Device-Name': 'SickGear',
|
||||
'X-Plex-Platform': platform.system(), 'X-Plex-Device': platform.system(),
|
||||
'X-Plex-Platform-Version': platform.release(),
|
||||
'X-Plex-Provides': 'Python', 'X-Plex-Product': 'Python',
|
||||
'X-Plex-Client-Identifier': self.client_id,
|
||||
'X-Plex-Version': str(self.config_version),
|
||||
'X-Plex-Username': user
|
||||
},
|
||||
json=True,
|
||||
data=urlencode({b'user[login]': user, b'user[password]': passw}).encode('utf-8')
|
||||
)['user']['authentication_token']
|
||||
except IndexError:
|
||||
self.log('Error getting Plex Token')
|
||||
|
||||
return auth
|
||||
|
||||
def get_access_token(self, token):
|
||||
|
||||
resources = self.get_url_x('https://plex.tv/api/resources?includeHttps=1', token=token)
|
||||
if None is resources:
|
||||
return ''
|
||||
|
||||
devices = resources.findall('Device')
|
||||
for device in devices:
|
||||
if 1 == len(devices) \
|
||||
or self.machine_client_identifier == device.get('clientIdentifier') \
|
||||
or (self.device_name
|
||||
and (self.device_name.lower() in device.get('name').lower()
|
||||
or self.device_name.lower() in device.get('clientIdentifier').lower())
|
||||
):
|
||||
access_token = device.get('accessToken')
|
||||
if not access_token:
|
||||
return ''
|
||||
return access_token
|
||||
|
||||
connections = device.findall('Connection')
|
||||
for connection in connections:
|
||||
if self.plex_host == connection.get('address'):
|
||||
access_token = device.get('accessToken')
|
||||
if not access_token:
|
||||
return ''
|
||||
uri = connection.get('uri')
|
||||
match = re.compile(r'(http[s]?://.*?):(\d*)').match(uri)
|
||||
if match:
|
||||
self.plex_host = match.group(1)
|
||||
self.plex_port = match.group(2)
|
||||
return access_token
|
||||
return ''
|
||||
|
||||
def get_plex_home_user_tokens(self):
|
||||
|
||||
user_tokens = {}
|
||||
|
||||
# check Plex is contactable
|
||||
home_users = self.get_url_x('https://plex.tv/api/home/users')
|
||||
if None is not home_users:
|
||||
for user in home_users.findall('User'):
|
||||
user_id = user.get('id')
|
||||
# use empty byte data to force POST
|
||||
switch_page = self.get_url_x('https://plex.tv/api/home/users/%s/switch' % user_id, data=b'')
|
||||
if None is not switch_page:
|
||||
home_token = 'user' == switch_page.tag and switch_page.get('authenticationToken')
|
||||
if home_token:
|
||||
username = switch_page.get('title')
|
||||
user_tokens[username] = self.get_access_token(home_token)
|
||||
return user_tokens
|
||||
|
||||
def get_url_x(self, url, token=None, **kwargs):
|
||||
|
||||
if not token:
|
||||
token = self.token
|
||||
if not url.startswith('http'):
|
||||
url = 'http://' + url
|
||||
|
||||
for x in range(0, 3):
|
||||
if 0 < x:
|
||||
sleep(0.5)
|
||||
try:
|
||||
headers = {'X-Plex-Device-Name': 'SickGear',
|
||||
'X-Plex-Platform': platform.system(), 'X-Plex-Device': platform.system(),
|
||||
'X-Plex-Platform-Version': platform.release(),
|
||||
'X-Plex-Provides': 'controller', 'X-Plex-Product': 'Python',
|
||||
'X-Plex-Client-Identifier': self.client_id,
|
||||
'X-Plex-Version': str(self.config_version),
|
||||
'X-Plex-Token': token,
|
||||
'Accept': 'application/xml'
|
||||
}
|
||||
if self.username:
|
||||
headers.update({'X-Plex-Username': self.username})
|
||||
page = getURL(url, headers=headers, **kwargs)
|
||||
if page:
|
||||
parsed = etree.fromstring(page)
|
||||
if None is not parsed and len(parsed):
|
||||
return parsed
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
self.log('Error requesting page: %s' % e)
|
||||
continue
|
||||
return None
|
||||
|
||||
# uses the Plex API to delete files instead of system functions, useful for remote installations
|
||||
def delete_file(self, media_id=0):
|
||||
|
||||
try:
|
||||
endpoint = ('/library/metadata/%s' % str(media_id))
|
||||
req = urllib2.Request('%s:%s%s' % (self.plex_host, self.plex_port, endpoint),
|
||||
None, {'X-Plex-Token': self.token})
|
||||
req.get_method = lambda: 'DELETE'
|
||||
urllib2.urlopen(req)
|
||||
except (StandardError, Exception):
|
||||
return False
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def get_media_info(video_node):
|
||||
|
||||
progress = 0
|
||||
if None is not video_node.get('viewOffset') and None is not video_node.get('duration'):
|
||||
progress = tryInt(video_node.get('viewOffset')) * 100 / tryInt(video_node.get('duration'))
|
||||
|
||||
for media in video_node.findall('Media'):
|
||||
for part in media.findall('Part'):
|
||||
file_name = part.get('file')
|
||||
# if '3' > sys.version: # remove HTML quoted characters, only works in python < 3
|
||||
# file_name = urllib2.unquote(file_name.encode('utf-8', errors='replace'))
|
||||
# else:
|
||||
file_name = urllib2.unquote(file_name)
|
||||
|
||||
return {'path_file': file_name, 'media_id': video_node.get('ratingKey'),
|
||||
'played': int(video_node.get('viewCount') or 0), 'progress': progress}
|
||||
|
||||
def check_users_watched(self, users, media_id):
|
||||
|
||||
if not self.home_user_tokens:
|
||||
self.home_user_tokens = self.get_plex_home_user_tokens()
|
||||
|
||||
result = {}
|
||||
if 'all' in users:
|
||||
users = self.home_user_tokens.keys()
|
||||
|
||||
for user in users:
|
||||
user_media_page = self.get_url_pms('/library/metadata/%s' % media_id, token=self.home_user_tokens[user])
|
||||
if None is not user_media_page:
|
||||
video_node = user_media_page.find('Video')
|
||||
|
||||
progress = 0
|
||||
if None is not video_node.get('viewOffset') and None is not video_node.get('duration'):
|
||||
progress = tryInt(video_node.get('viewOffset')) * 100 / tryInt(video_node.get('duration'))
|
||||
|
||||
played = int(video_node.get('viewCount') or 0)
|
||||
if not progress and not played:
|
||||
continue
|
||||
|
||||
date_watched = 0
|
||||
if (0 < tryInt(video_node.get('viewCount'))) or (0 < self.default_progress_as_watched < progress):
|
||||
last_viewed_at = video_node.get('lastViewedAt')
|
||||
if last_viewed_at and last_viewed_at not in ('', '0'):
|
||||
date_watched = last_viewed_at
|
||||
|
||||
if date_watched:
|
||||
result[user] = dict(played=played, progress=progress, date_watched=date_watched)
|
||||
else:
|
||||
self.log('Do not have the token for %s.' % user)
|
||||
|
||||
return result
|
||||
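The condition above is the heart of the per-user watched test: a view counts as watched when Plex reports at least one full play, or when default_progress_as_watched is set and the resume progress exceeds it. A small standalone restatement of that rule, isolated purely for readability:

```python
# Restatement of the predicate used above; not new logic, just separated out.
def is_watched(view_count, progress_pct, progress_threshold):
    return 0 < view_count or 0 < progress_threshold < progress_pct

assert is_watched(1, 0, 0)         # played at least once
assert is_watched(0, 90, 85)       # 90% resume progress with an 85% threshold
assert not is_watched(0, 90, 0)    # threshold disabled, so progress alone is not enough
```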
|
||||
def get_url_pms(self, endpoint=None, **kwargs):
|
||||
|
||||
return endpoint and self.get_url_x(
|
||||
'%s:%s%s' % (self.plex_host, self.plex_port, endpoint), **kwargs)
|
||||
|
||||
# parse episode information from season pages
|
||||
def stat_show(self, node):
|
||||
|
||||
episodes = []
|
||||
if 'directory' == node.tag.lower() and 'show' == node.get('type'):
|
||||
show = self.get_url_pms(node.get('key'))
|
||||
if None is show: # Check if show page is None or empty
|
||||
self.log('Failed to load show page. Skipping...')
|
||||
return None
|
||||
|
||||
for season_node in show.findall('Directory'): # Each directory is a season
|
||||
if 'season' != season_node.get('type'): # skips Specials
|
||||
continue
|
||||
|
||||
season_key = season_node.get('key')
|
||||
season = self.get_url_pms(season_key)
|
||||
if None is not season:
|
||||
episodes += [season]
|
||||
|
||||
elif 'mediacontainer' == node.tag.lower() and 'episode' == node.get('viewGroup'):
|
||||
episodes = [node]
|
||||
|
||||
check_users = []
|
||||
if self.default_home_users:
|
||||
check_users = self.default_home_users.strip(' ,').lower().split(',')
|
||||
for k in range(0, len(check_users)): # Remove extra spaces and commas
|
||||
check_users[k] = check_users[k].strip(', ')
|
||||
|
||||
for episode_node in episodes:
|
||||
for video_node in episode_node.findall('Video'):
|
||||
|
||||
media_info = self.get_media_info(video_node)
|
||||
|
||||
if check_users:
|
||||
user_info = self.check_users_watched(check_users, media_info['media_id'])
|
||||
for user_name, user_media_info in user_info.items():
|
||||
self.show_states.update({len(self.show_states): dict(
|
||||
path_file=media_info['path_file'],
|
||||
media_id=media_info['media_id'],
|
||||
played=(100 * user_media_info['played']) or user_media_info['progress'] or 0,
|
||||
label=user_name,
|
||||
date_watched=user_media_info['date_watched'])})
|
||||
else:
|
||||
self.show_states.update({len(self.show_states): dict(
|
||||
path_file=media_info['path_file'],
|
||||
media_id=media_info['media_id'],
|
||||
played=(100 * media_info['played']) or media_info['progress'] or 0,
|
||||
label=self.username,
|
||||
date_watched=video_node.get('lastViewedAt'))})
|
||||
|
||||
self.file_count += 1
|
||||
|
||||
return True
|
||||
|
||||
def fetch_show_states(self, fetch_all=False):
|
||||
|
||||
error_log = []
|
||||
self.show_states = {}
|
||||
|
||||
server_check = self.get_url_pms('/')
|
||||
if None is server_check or 'MediaContainer' != server_check.tag:
|
||||
error_log.append('Cannot reach server!')
|
||||
|
||||
else:
|
||||
if not self.device_name:
|
||||
self.device_name = server_check.get('friendlyName')
|
||||
|
||||
if not self.machine_client_identifier:
|
||||
self.machine_client_identifier = server_check.get('machineIdentifier')
|
||||
|
||||
access_token = None
|
||||
if self.token:
|
||||
access_token = self.get_access_token(self.token)
|
||||
if access_token:
|
||||
self.token = access_token
|
||||
if not self.home_user_tokens:
|
||||
self.home_user_tokens = self.get_plex_home_user_tokens()
|
||||
else:
|
||||
error_log.append('Access Token not found')
|
||||
|
||||
resp_sections = None
|
||||
if None is access_token or len(access_token):
|
||||
resp_sections = self.get_url_pms('/library/sections/')
|
||||
|
||||
if None is not resp_sections:
|
||||
|
||||
unpather = []
|
||||
for loc in self.section_filter_path:
|
||||
loc = re.sub(r'[/\\]+', '/', loc.lower())
|
||||
loc = re.sub(r'^(.{,2})[/\\]', '', loc)
|
||||
unpather.append(loc)
|
||||
self.section_filter_path = unpather
|
||||
|
||||
for section in resp_sections.findall('Directory'):
|
||||
if 'show' != section.get('type') or not section.findall('Location'):
|
||||
continue
|
||||
|
||||
section_path = re.sub(r'[/\\]+', '/', section.find('Location').get('path').lower())
|
||||
section_path = re.sub(r'^(.{,2})[/\\]', '', section_path)
|
||||
if self.section_filter_path and not any([section_path in path for path in self.section_filter_path]):
|
||||
continue
|
||||
|
||||
if section.get('key') not in self.ignore_sections \
|
||||
and section.get('title') not in self.ignore_sections:
|
||||
section_key = section.get('key')
|
||||
|
||||
for (user, token) in (self.home_user_tokens or {'': None}).iteritems():
|
||||
self.username = user
|
||||
|
||||
resp_section = self.get_url_pms('/library/sections/%s/%s' % (
|
||||
section_key, ('recentlyViewed', 'all')[fetch_all]), token=token)
|
||||
if None is not resp_section:
|
||||
view_group = 'MediaContainer' == resp_section.tag and \
|
||||
resp_section.get('viewGroup') or ''
|
||||
if 'show' == view_group and fetch_all:
|
||||
for DirectoryNode in resp_section.findall('Directory'):
|
||||
self.stat_show(DirectoryNode)
|
||||
elif 'episode' == view_group and not fetch_all:
|
||||
self.stat_show(resp_section)
|
||||
|
||||
if 0 < len(error_log):
|
||||
self.log('Library errors...')
|
||||
for item in error_log:
|
||||
self.log(item)
|
||||
|
||||
return 0 < len(error_log)
|
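To show how this new lib/plex module is intended to be consumed, here is a hedged usage sketch. The host, token, threshold, and paths are illustrative placeholders; the settings keys simply mirror those read in Plex.__init__, and the result fields are the ones stat_show() stores in show_states.

```python
# Sketch under assumptions: values are examples, not defaults recommended by the diff.
from lib.plex.plex import Plex

client = Plex(dict(
    plex_host='192.168.1.10',            # hypothetical Plex Media Server address
    plex_port='32400',
    token='xxxxxxxxxxxx',                # a plex.tv token; username/password would also work
    default_progress_as_watched=85,      # >85% resume progress counts as watched
    section_filter_path=['/mnt/tv'],     # only scan library sections under this path
))

# fetch_show_states() returns True when errors were logged, False on success
if not client.fetch_show_states(fetch_all=False):
    for state in client.show_states.values():
        print(state['label'], state['path_file'], state['played'], state['date_watched'])
```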
95
readme.md
|
@ -1,57 +1,76 @@
|
|||
<hr>
|
||||
<div><a id="top"><img alt="SickGear" width="200" src="https://raw.githubusercontent.com/wiki/SickGear/SickGear.Wiki/images/SickGearLogo.png"></a></div>
|
||||
**SickGear**, a usenet and bittorrent PVR
|
||||
*SickGear*, a usenet and bittorrent PVR
|
||||
<hr>
|
||||
_Please note you should know how to use git and set up basic requirements in order to run this software._
|
||||
|
||||
SickGear provides management of TV shows and/or Anime, it can detect new episodes, link to downloader apps, and more. SickGear is a proud descendant of Sick Beard and is humbled to have been endorsed by one of its former lead developers.
|
||||
SickGear provides management of TV shows and/or Anime; it detects new episodes, links to downloader apps, and more.
|
||||
|
||||
Why SickGear?
|
||||
* SickGear maintains perfect uptime with the longest track record of being stable, reliable and trusted to work
|
||||
* SickGear delivers quality from active development with a wealth of options on a dark or light themed interface
|
||||
#### Why SickGear?
|
||||
* SickGear maintains a perfect uptime with the longest track record of being stable, reliable and trusted to work
|
||||
* SickGear delivers valued quality from active development with a wealth of options on a dark or light themed interface
|
||||
|
||||
#### What now?
|
||||
* [Install guides](https://github.com/SickGear/SickGear/wiki/Installation-Instructions) for many platforms
|
||||
* [Migrating](https://github.com/SickGear/SickGear/wiki/Install-SickGear-%5B0%5D-Migrate) to a hassle free and feature rich set up is super simple
|
||||
|
||||
## Features include
|
||||
Or read more below...
|
||||
|
||||
## Some innovative SickGear features
|
||||
* Ideas of shows to add from Trakt, IMDb, and AniDB categories; anticipated, new seasons, new shows, popular, and more
|
||||
* Advanced add show finder that nearly always returns results sorted A-Z, Aired, or Relevancy, with known shows separated
|
||||
* Releases can be searched for while adding a show (e.g. optionally fetch the oldest and/or newest without further effort)
|
||||
* Advanced automated search that always works to prevent you wasting time manually scanning result lists
|
||||
* Choose to delete watched episodes from a list built directly from played media at Kodi, Emby, and/or Plex (No Trakt!)
|
||||
* Smart custom qualities selector system that helps achieve an optimal quality selection for automated episode search
|
||||
* Choose to have episodes upgraded in quality, or keep existing archive quality, and upgrade future episodes either way
|
||||
* Natively uses a powerful regex pattern matching system for superior information handling
|
||||
* Select a UI style anytime; Regular, Proview I, or Proview II - independently for Episode View, and for Display Show
|
||||
* Smart fanart system allows you to rate fanart as avoid/prefer; the UI can be moved or toggled off/on to fully appreciate a fanart image
|
||||
* Episode View Layout "Day by Day" displays a fanart background from randomly selected imminent releases
|
||||
* Configure from 0 to 500 fanart images to cache per show, default is 3.
|
||||
* Provider server failure charts display what failed, when and why
|
||||
* Provider server failure handler that backs off connection attempts the more a server fails
|
||||
* Provider server activity graph and stats display how each are performing
|
||||
* Built-in providers that have proven over time to deliver leading durability among this application class and third parties
|
||||
* Built-in Kodi repository service to maintain SickGear add-ons; the first add-on is Watched State Updater
|
||||
* Separate Plex server and Plex client settings, multiple LAN Plex server support, and Plex Home multiple user support
|
||||
* Intelligent library updates target the actual Plex server that carries the show of an episode (multi LAN server)
|
||||
* Communicate directly with NZBGet using a dedicated integration script
|
||||
* Communicate directly with qBittorrent/Deluge/Transmission etc. using a dedicated integration script
|
||||
* Proxy auto-config (PAC) support to define when a proxy is used instead of using one for every system request
|
||||
* Change file date, to the date that the episode aired (yup, the feature was first created here for XBMC file sorting)
|
||||
* Visual percentage progress of managed episodes
|
||||
* Configure an episode status for removed media files
|
||||
* Configurable default home page
|
||||
|
||||
Features above link to the UI; innovations also exist in core. We often inspire imitators, but you can [get the real deal!](https://github.com/SickGear/SickGear/wiki/Installation-Instructions)
|
||||
|
||||
Other features and worthy points:
|
||||
* Stable, quality assured testing and development cycle
|
||||
* Innovations that inspire imitators
|
||||
* Compatible with any platform via a familiar web interface
|
||||
* Most comprehensive selection of usenet and torrent sources
|
||||
* Compatible with any platform via a familiar web interface
|
||||
* Episode management
|
||||
* Group shows into personalised sections
|
||||
* View missed and upcoming shows at a glance with "day by day" and other layouts
|
||||
* Group shows into personalised sections in a full show list view
|
||||
* Automatic and manual search for availability of wanted episodes
|
||||
* Set what episodes you want and how to receive them
|
||||
* Uses well known established index sites to gather show information
|
||||
* Searches for known alternatively named shows with a fallback to user edited names
|
||||
* Searches for known alternatively numbered episodes with a fallback to user edited numbers
|
||||
* Searches for known alternatively numbered seasons with a fallback to user edited numbers
|
||||
* Forward search results to a downloader (e.g. NZBGet, SABNZBd, uTorrent, and others)
|
||||
* Save search results to a "blackhole" folder that can be periodically scanned for taking action
|
||||
* Post-process downloaded episodes into customisable layouts, with or without extra metadata
|
||||
* Advanced Failed Download Handling (FDH)
|
||||
* Overview of seasons, episodes, rating, version, airdate, episode status ([their meaning](https://github.com/SickGear/SickGear/wiki/Status-Modes))
|
||||
* Processing nzb/torrents with your downloader application at your chosen qualities
|
||||
* Subtitle management
|
||||
* Automatic and manual search for availability of episodes you want
|
||||
* Well known established sources are used to gather reliable show information
|
||||
* Known and user added alternative show names can be searched
|
||||
* Known and user added alternative numbered seasons and/or episodes can be searched
|
||||
* Forward search results directly to clients like NZBGet, SABNZBd, qBitTorrent, Deluge and others
|
||||
* Save search results to a "blackhole" folder, a place designated for clients to auto scan
|
||||
* Built-in post processing of episodes, with renaming to custom folder and/or file names
|
||||
* Fetch metadata like fanart, poster and banner images, nfo's for clients like Kodi, Plex and many more
|
||||
* Native advanced automated handling of failed downloads to ensure success after a failure
|
||||
* Overview of episodes, rating, version, airdate, episode status ([their meaning](https://github.com/SickGear/SickGear/wiki/Status-Modes))
|
||||
* Support for specials and multi episode media files
|
||||
* Automated subtitle management
|
||||
* Notification
|
||||
* Home Theater/NAS (Emby, Kodi, Plex, Syno, Tivo, and more)
|
||||
* Home Theater/NAS (Emby, Kodi, Plex, Syno, Tivo, and more) can be notified to update their library
|
||||
* Social notifiers (Trakt, Slack, Gitter, Discord, E-mail, and more)
|
||||
* Device notifiers (Boxcar2, Notify My Android, Growl, Prowl, and more)
|
||||
* Server friendly with minimal number of calls (e.g. one request per chosen snatch, not per result)
|
||||
* Can recommend trendy and/or personally tailored shows from Trakt, IMDb, AniDB
|
||||
* Server friendly with minimal number of API calls using both active and passive search tech
|
||||
* Automated alternative show names and episode numbering from XEM
|
||||
|
||||
Some of our innovative features;
|
||||
* Automated search after adding a show
|
||||
* Desktop notifications
|
||||
* Enhanced Anime features when adding shows
|
||||
* Visual percentage progress of managed episodes
|
||||
* Separate Plex server and Plex client settings
|
||||
* Intelligent library updates that target Plex servers that list the show of an episode
|
||||
* Configurable episode status for removed media files
|
||||
* Configurable default home page
|
||||
* Source providers
|
||||
* User Interface
|
||||
|
||||
## Screenies
|
||||
<table><thead></thead><tbody>
|
||||
<tr align="center">
|
||||
|
|
|
@ -37,12 +37,14 @@ sys.path.insert(1, os.path.abspath('../lib'))
|
|||
from sickbeard import helpers, encodingKludge as ek
|
||||
from sickbeard import db, image_cache, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \
|
||||
scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \
|
||||
show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes, properFinder
|
||||
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax
|
||||
show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes, properFinder, \
|
||||
watchedstate_queue
|
||||
from sickbeard.config import check_section, check_setting_int, check_setting_str, ConfigMigrator, minimax
|
||||
from sickbeard.common import SD, SKIPPED
|
||||
from sickbeard.databases import mainDB, cache_db, failed_db
|
||||
from sickbeard.exceptions import ex
|
||||
from sickbeard.providers.generic import GenericProvider
|
||||
from sickbeard.watchedstate import EmbyWatchedStateUpdater, PlexWatchedStateUpdater
|
||||
from indexers.indexer_config import INDEXER_TVDB
|
||||
from indexers.indexer_api import indexerApi
|
||||
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, \
|
||||
|
@ -85,6 +87,9 @@ autoPostProcesserScheduler = None
|
|||
subtitlesFinderScheduler = None
|
||||
# traktCheckerScheduler = None
|
||||
background_mapping_task = None
|
||||
embyWatchedStateScheduler = None
|
||||
plexWatchedStateScheduler = None
|
||||
watchedStateQueueScheduler = None
|
||||
|
||||
provider_ping_thread_pool = {}
|
||||
|
||||
|
@ -226,12 +231,15 @@ DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10
|
|||
DEFAULT_RECENTSEARCH_FREQUENCY = 40
|
||||
DEFAULT_BACKLOG_FREQUENCY = 21
|
||||
DEFAULT_UPDATE_FREQUENCY = 1
|
||||
DEFAULT_WATCHEDSTATE_FREQUENCY = 10
|
||||
|
||||
MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
|
||||
MIN_RECENTSEARCH_FREQUENCY = 10
|
||||
MIN_BACKLOG_FREQUENCY = 7
|
||||
MAX_BACKLOG_FREQUENCY = 42
|
||||
MIN_UPDATE_FREQUENCY = 1
|
||||
MIN_WATCHEDSTATE_FREQUENCY = 10
|
||||
MAX_WATCHEDSTATE_FREQUENCY = 60
|
||||
|
||||
BACKLOG_DAYS = 7
|
||||
SEARCH_UNAIRED = False
|
||||
|
@ -279,6 +287,8 @@ USE_EMBY = False
|
|||
EMBY_UPDATE_LIBRARY = False
|
||||
EMBY_HOST = None
|
||||
EMBY_APIKEY = None
|
||||
EMBY_WATCHEDSTATE_SCHEDULED = False
|
||||
EMBY_WATCHEDSTATE_FREQUENCY = None
|
||||
|
||||
USE_KODI = False
|
||||
KODI_ALWAYS_ON = True
|
||||
|
@ -301,6 +311,8 @@ PLEX_SERVER_HOST = None
|
|||
PLEX_HOST = None
|
||||
PLEX_USERNAME = None
|
||||
PLEX_PASSWORD = None
|
||||
PLEX_WATCHEDSTATE_SCHEDULED = False
|
||||
PLEX_WATCHEDSTATE_FREQUENCY = None
|
||||
|
||||
USE_XBMC = False
|
||||
XBMC_ALWAYS_ON = True
|
||||
|
@ -566,8 +578,10 @@ def initialize(console_logging=True):
|
|||
# global traktCheckerScheduler
|
||||
global recentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \
|
||||
versionCheckScheduler, showQueueScheduler, searchQueueScheduler, \
|
||||
properFinderScheduler, autoPostProcesserScheduler, subtitlesFinderScheduler, background_mapping_task, \
|
||||
provider_ping_thread_pool
|
||||
properFinderScheduler, autoPostProcesserScheduler, subtitlesFinderScheduler, \
|
||||
background_mapping_task, provider_ping_thread_pool, \
|
||||
embyWatchedStateScheduler, plexWatchedStateScheduler, watchedStateQueueScheduler, \
|
||||
MIN_WATCHEDSTATE_FREQUENCY, MAX_WATCHEDSTATE_FREQUENCY, DEFAULT_WATCHEDSTATE_FREQUENCY
|
||||
# Add Show Search
|
||||
global RESULTS_SORTBY
|
||||
# Add Show Defaults
|
||||
|
@ -626,6 +640,7 @@ def initialize(console_logging=True):
|
|||
METADATA_PS3, METADATA_TIVO, METADATA_WDTV, METADATA_XBMC_12PLUS
|
||||
# Notification Settings/HT and NAS
|
||||
global USE_EMBY, EMBY_UPDATE_LIBRARY, EMBY_HOST, EMBY_APIKEY, \
|
||||
EMBY_WATCHEDSTATE_SCHEDULED, EMBY_WATCHEDSTATE_FREQUENCY, \
|
||||
USE_KODI, KODI_ALWAYS_ON, KODI_UPDATE_LIBRARY, KODI_UPDATE_FULL, KODI_UPDATE_ONLYFIRST, \
|
||||
KODI_HOST, KODI_USERNAME, KODI_PASSWORD, KODI_NOTIFY_ONSNATCH, \
|
||||
KODI_NOTIFY_ONDOWNLOAD, KODI_NOTIFY_ONSUBTITLEDOWNLOAD, \
|
||||
|
@ -633,6 +648,7 @@ def initialize(console_logging=True):
|
|||
XBMC_UPDATE_LIBRARY, XBMC_UPDATE_FULL, XBMC_UPDATE_ONLYFIRST, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, \
|
||||
USE_PLEX, PLEX_USERNAME, PLEX_PASSWORD, PLEX_UPDATE_LIBRARY, PLEX_SERVER_HOST, \
|
||||
PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_HOST, \
|
||||
PLEX_WATCHEDSTATE_SCHEDULED, PLEX_WATCHEDSTATE_FREQUENCY, \
|
||||
USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, \
|
||||
USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, \
|
||||
USE_SYNOINDEX, \
|
||||
|
@ -680,7 +696,7 @@ def initialize(console_logging=True):
|
|||
'Growl', 'Prowl', 'Twitter', 'Slack', 'Discordapp', 'Boxcar2', 'NMJ', 'NMJv2',
|
||||
'Synology', 'SynologyNotifier',
|
||||
'pyTivo', 'NMA', 'Pushalot', 'Pushbullet', 'Subtitles'):
|
||||
CheckSection(CFG, stanza)
|
||||
check_section(CFG, stanza)
|
||||
|
||||
update_config = False
|
||||
|
||||
|
@ -933,6 +949,10 @@ def initialize(console_logging=True):
|
|||
EMBY_UPDATE_LIBRARY = bool(check_setting_int(CFG, 'Emby', 'emby_update_library', 0))
|
||||
EMBY_HOST = check_setting_str(CFG, 'Emby', 'emby_host', '')
|
||||
EMBY_APIKEY = check_setting_str(CFG, 'Emby', 'emby_apikey', '')
|
||||
EMBY_WATCHEDSTATE_SCHEDULED = bool(check_setting_int(CFG, 'Emby', 'emby_watchedstate_scheduled', 0))
|
||||
EMBY_WATCHEDSTATE_FREQUENCY = minimax(check_setting_int(
|
||||
CFG, 'Emby', 'emby_watchedstate_frequency', DEFAULT_WATCHEDSTATE_FREQUENCY),
|
||||
DEFAULT_WATCHEDSTATE_FREQUENCY, MIN_WATCHEDSTATE_FREQUENCY, MAX_WATCHEDSTATE_FREQUENCY)
|
||||
|
||||
USE_KODI = bool(check_setting_int(CFG, 'Kodi', 'use_kodi', 0))
|
||||
KODI_ALWAYS_ON = bool(check_setting_int(CFG, 'Kodi', 'kodi_always_on', 1))
|
||||
|
@ -967,6 +987,10 @@ def initialize(console_logging=True):
|
|||
PLEX_HOST = check_setting_str(CFG, 'Plex', 'plex_host', '')
|
||||
PLEX_USERNAME = check_setting_str(CFG, 'Plex', 'plex_username', '')
|
||||
PLEX_PASSWORD = check_setting_str(CFG, 'Plex', 'plex_password', '')
|
||||
PLEX_WATCHEDSTATE_SCHEDULED = bool(check_setting_int(CFG, 'Plex', 'plex_watchedstate_scheduled', 0))
|
||||
PLEX_WATCHEDSTATE_FREQUENCY = minimax(check_setting_int(
|
||||
CFG, 'Plex', 'plex_watchedstate_frequency', DEFAULT_WATCHEDSTATE_FREQUENCY),
|
||||
DEFAULT_WATCHEDSTATE_FREQUENCY, MIN_WATCHEDSTATE_FREQUENCY, MAX_WATCHEDSTATE_FREQUENCY)
|
||||
|
||||
USE_GROWL = bool(check_setting_int(CFG, 'Growl', 'use_growl', 0))
|
||||
GROWL_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Growl', 'growl_notify_onsnatch', 0))
|
||||
|
@ -1049,7 +1073,6 @@ def initialize(console_logging=True):
|
|||
TRAKT_ACCOUNTS = TraktAPI.read_config_string(check_setting_str(CFG, 'Trakt', 'trakt_accounts', ''))
|
||||
TRAKT_MRU = check_setting_str(CFG, 'Trakt', 'trakt_mru', '')
|
||||
|
||||
CheckSection(CFG, 'pyTivo')
|
||||
USE_PYTIVO = bool(check_setting_int(CFG, 'pyTivo', 'use_pytivo', 0))
|
||||
PYTIVO_HOST = check_setting_str(CFG, 'pyTivo', 'pytivo_host', '')
|
||||
PYTIVO_SHARE_NAME = check_setting_str(CFG, 'pyTivo', 'pytivo_share_name', '')
|
||||
|
@ -1418,6 +1441,23 @@ def initialize(console_logging=True):
|
|||
|
||||
background_mapping_task = threading.Thread(name='LOAD-MAPPINGS', target=indexermapper.load_mapped_ids)
|
||||
|
||||
watchedStateQueueScheduler = scheduler.Scheduler(
|
||||
watchedstate_queue.WatchedStateQueue(),
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName='WATCHEDSTATEQUEUE')
|
||||
|
||||
embyWatchedStateScheduler = scheduler.Scheduler(
|
||||
EmbyWatchedStateUpdater(),
|
||||
cycleTime=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_FREQUENCY),
|
||||
run_delay=datetime.timedelta(minutes=5),
|
||||
threadName='EMBYWATCHEDSTATE')
|
||||
|
||||
plexWatchedStateScheduler = scheduler.Scheduler(
|
||||
PlexWatchedStateUpdater(),
|
||||
cycleTime=datetime.timedelta(minutes=PLEX_WATCHEDSTATE_FREQUENCY),
|
||||
run_delay=datetime.timedelta(minutes=5),
|
||||
threadName='PLEXWATCHEDSTATE')
|
||||
|
||||
__INITIALIZED__ = True
|
||||
return True
|
||||
|
||||
|
@ -1427,7 +1467,8 @@ def enabled_schedulers(is_init=False):
|
|||
for s in ([], [events])[is_init] + \
|
||||
[recentSearchScheduler, backlogSearchScheduler, showUpdateScheduler,
|
||||
versionCheckScheduler, showQueueScheduler, searchQueueScheduler, properFinderScheduler,
|
||||
autoPostProcesserScheduler, subtitlesFinderScheduler] + \
|
||||
autoPostProcesserScheduler, subtitlesFinderScheduler,
|
||||
embyWatchedStateScheduler, plexWatchedStateScheduler, watchedStateQueueScheduler] + \
|
||||
([events], [])[is_init]:
|
||||
yield s
|
||||
|
||||
|
@ -1737,6 +1778,8 @@ def save_config():
|
|||
new_config['Emby']['emby_update_library'] = int(EMBY_UPDATE_LIBRARY)
|
||||
new_config['Emby']['emby_host'] = EMBY_HOST
|
||||
new_config['Emby']['emby_apikey'] = EMBY_APIKEY
|
||||
new_config['Emby']['emby_watchedstate_scheduled'] = int(EMBY_WATCHEDSTATE_SCHEDULED)
|
||||
new_config['Emby']['emby_watchedstate_frequency'] = int(EMBY_WATCHEDSTATE_FREQUENCY)
|
||||
|
||||
new_config['Kodi'] = {}
|
||||
new_config['Kodi']['use_kodi'] = int(USE_KODI)
|
||||
|
@ -1761,6 +1804,8 @@ def save_config():
|
|||
new_config['Plex']['plex_notify_ondownload'] = int(PLEX_NOTIFY_ONDOWNLOAD)
|
||||
new_config['Plex']['plex_notify_onsubtitledownload'] = int(PLEX_NOTIFY_ONSUBTITLEDOWNLOAD)
|
||||
new_config['Plex']['plex_host'] = PLEX_HOST
|
||||
new_config['Plex']['plex_watchedstate_scheduled'] = int(PLEX_WATCHEDSTATE_SCHEDULED)
|
||||
new_config['Plex']['plex_watchedstate_frequency'] = int(PLEX_WATCHEDSTATE_FREQUENCY)
|
||||
|
||||
new_config['XBMC'] = {}
|
||||
new_config['XBMC']['use_xbmc'] = int(USE_XBMC)
|
||||
|
|
|
@ -23,21 +23,22 @@ from sickbeard import logger, processTV
|
|||
from sickbeard import encodingKludge as ek
|
||||
|
||||
|
||||
class PostProcesser():
|
||||
class PostProcesser:
|
||||
def __init__(self):
|
||||
self.amActive = False
|
||||
|
||||
@staticmethod
|
||||
def check_paused():
|
||||
if sickbeard.PROCESS_AUTOMATICALLY:
|
||||
return False
|
||||
return True
|
||||
def is_enabled():
|
||||
return sickbeard.PROCESS_AUTOMATICALLY
|
||||
|
||||
def run(self):
|
||||
if not sickbeard.PROCESS_AUTOMATICALLY:
|
||||
return
|
||||
|
||||
if self.is_enabled():
|
||||
self.amActive = True
|
||||
self._main()
|
||||
self.amActive = False
|
||||
|
||||
@staticmethod
|
||||
def _main():
|
||||
|
||||
if not ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
|
||||
logger.log(u"Automatic post-processing attempted but dir %s doesn't exist" % sickbeard.TV_DOWNLOAD_DIR,
|
||||
|
@ -50,5 +51,3 @@ class PostProcesser():
|
|||
return
|
||||
|
||||
processTV.processDir(sickbeard.TV_DOWNLOAD_DIR, is_basedir=True)
|
||||
|
||||
self.amActive = False
|
||||
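For clarity, the two hunks above leave the class in roughly the following shape: the check_paused()/early-return pair is replaced by an is_enabled() gate around run(), and _main() no longer resets amActive itself. This is reassembled from the diff for readability, not an additional change.

```python
# Post-diff shape of the class, pieced together from the hunks above.
import sickbeard

class PostProcesser:
    def __init__(self):
        self.amActive = False

    @staticmethod
    def is_enabled():
        return sickbeard.PROCESS_AUTOMATICALLY

    def run(self):
        if self.is_enabled():
            self.amActive = True
            self._main()
            self.amActive = False
```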
|
|
|
@ -17,6 +17,7 @@
|
|||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||
import re
|
||||
import datetime
|
||||
import os
|
||||
|
||||
import sickbeard
|
||||
from sickbeard.common import Quality
|
||||
|
@ -379,3 +380,48 @@ class ImageUrlList(list):
|
|||
if self._is_cache_item(x) and url == x[0]:
|
||||
super(ImageUrlList, self).remove(x)
|
||||
break
|
||||
|
||||
|
||||
if 'nt' == os.name:
|
||||
import ctypes
|
||||
|
||||
class WinEnv:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def get_environment_variable(name):
|
||||
name = unicode(name) # ensures string argument is unicode
|
||||
n = ctypes.windll.kernel32.GetEnvironmentVariableW(name, None, 0)
|
||||
result = None
|
||||
if n:
|
||||
buf = ctypes.create_unicode_buffer(u'\0'*n)
|
||||
ctypes.windll.kernel32.GetEnvironmentVariableW(name, buf, n)
|
||||
result = buf.value
|
||||
return result
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.get_environment_variable(key)
|
||||
|
||||
def get(self, key, default=None):
|
||||
r = self.get_environment_variable(key)
|
||||
return r if r is not None else default
|
||||
|
||||
sickbeard.ENV = WinEnv()
|
||||
else:
|
||||
class LinuxEnv(object):
|
||||
def __init__(self, environ):
|
||||
self.environ = environ
|
||||
|
||||
def __getitem__(self, key):
|
||||
v = self.environ.get(key)
|
||||
try:
|
||||
return v.decode(SYS_ENCODING) if isinstance(v, str) else v
|
||||
except (UnicodeDecodeError, UnicodeEncodeError):
|
||||
return v
|
||||
|
||||
def get(self, key, default=None):
|
||||
v = self[key]
|
||||
return v if v is not None else default
|
||||
|
||||
sickbeard.ENV = LinuxEnv(os.environ)
|
||||
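The platform-specific wrappers above are exposed through a single sickbeard.ENV object, so callers never branch on os.name themselves. A brief usage sketch follows; the variable names are only examples.

```python
# Sketch: both WinEnv and LinuxEnv return None (never raise) for unknown keys,
# and LinuxEnv additionally decodes byte values using SYS_ENCODING where possible.
import sickbeard

home = sickbeard.ENV.get('HOME') or sickbeard.ENV.get('USERPROFILE', '')
temp_dir = sickbeard.ENV['TMP']   # item access mirrors .get() with a None fallback
```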
|
|
BIN
sickbeard/clients/kodi/repository.sickgear/icon.png
Normal file
After Width: | Height: | Size: 58 KiB |
|
@ -0,0 +1,22 @@
|
|||
# /tests/_devenv.py
|
||||
#
|
||||
# To trigger dev env
|
||||
#
|
||||
# import _devenv as devenv
|
||||
#
|
||||
|
||||
__remotedebug__ = True
|
||||
|
||||
if __remotedebug__:
|
||||
import sys
|
||||
sys.path.append(r'C:\Program Files\JetBrains\PyCharm 2017.2.1\debug-eggs\pycharm-debug.egg')
|
||||
import pydevd
|
||||
|
||||
|
||||
def setup_devenv(state):
|
||||
pydevd.settrace('localhost', port=(65001, 65000)[bool(state)], stdoutToServer=True, stderrToServer=True,
|
||||
suspend=False)
|
||||
|
||||
|
||||
def stop():
|
||||
pydevd.stoptrace()
|
|
@ -0,0 +1,35 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<addon id="service.sickgear.watchedstate.updater" name="SickGear Watched State Updater" version="1.0.3" provider-name="SickGear">
|
||||
<requires>
|
||||
<import addon="xbmc.python" version="2.19.0" />
|
||||
<import addon="xbmc.json" version="6.20.0" />
|
||||
<import addon="xbmc.addon" version="14.0.0" />
|
||||
</requires>
|
||||
<extension point="xbmc.service" library="service.py" start="login" />
|
||||
<extension point="xbmc.python.pluginsource" library="service.py" >
|
||||
<provides>executable</provides>
|
||||
</extension>
|
||||
<extension point="xbmc.addon.metadata">
|
||||
<summary lang="en">SickGear Watched State Updater</summary>
|
||||
<description lang="en">This Add-on notifies SickGear when an episode watched state is changed in Kodi</description>
|
||||
<platform>all</platform>
|
||||
<language>en</language>
|
||||
<disclaimer/>
|
||||
<license/>
|
||||
<forum/>
|
||||
<website>https://github.com/sickgear/sickgear</website>
|
||||
<email/>
|
||||
<nofanart>true</nofanart>
|
||||
<source>https://github.com/sickgear/sickgear</source>
|
||||
<assets>
|
||||
<icon>icon.png</icon>
|
||||
</assets>
|
||||
<news>[B]1.0.0[/B] (2017-10-04)
|
||||
- Initial release
|
||||
[B]1.0.2[/B] (2017-11-15)
|
||||
- Devel release for an SG API change
|
||||
[B]1.0.3[/B] (2018-02-28)
|
||||
- Add episodeid to payload
|
||||
</news>
|
||||
</extension>
|
||||
</addon>
|
|
@ -0,0 +1,2 @@
|
|||
[B]1.0.0[/B] (2017-10-04)
|
||||
- Initial release
|
After Width: | Height: | Size: 311 KiB |
After Width: | Height: | Size: 29 KiB |
After Width: | Height: | Size: 2.3 KiB |
After Width: | Height: | Size: 13 KiB |
After Width: | Height: | Size: 2.3 KiB |
After Width: | Height: | Size: 14 KiB |
After Width: | Height: | Size: 2.2 KiB |
After Width: | Height: | Size: 34 KiB |
|
@ -0,0 +1,18 @@
|
|||
<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
<strings>
|
||||
<string id="32000">General</string>
|
||||
<string id="32011">Action Notifications</string>
|
||||
<string id="32012">Error Notifications</string>
|
||||
<string id="32021">Verbose Logs</string>
|
||||
|
||||
<string id="32100">Servers</string>
|
||||
<string id="32111">SickGear IP</string>
|
||||
<string id="32112">SickGear Port</string>
|
||||
<string id="32121">Kodi IP</string>
|
||||
<string id="32122">Kodi JSON RPC Port</string>
|
||||
|
||||
<string id="32500">The following required Kodi settings should already be enabled:</string>
|
||||
<string id="32511">At "System / Service(s) settings / Control (aka Remote control)"</string>
|
||||
<string id="32512">* Allow remote control from/by applications/programs on this system</string>
|
||||
<string id="32513">* Allow remote control from/by applications/programs on other systems</string>
|
||||
</strings>
|
|
@ -0,0 +1,21 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<settings>
|
||||
<category label="32000">
|
||||
<setting label="32011" type="bool" id="action_notification" default="true" />
|
||||
<setting label="32012" type="bool" id="error_notification" default="true" />
|
||||
<setting label="32021" type="bool" id="verbose_log" default="true" />
|
||||
|
||||
<setting label="32500" type="lsep" />
|
||||
<setting label="32511" type="lsep" />
|
||||
<setting label="32512" type="lsep" />
|
||||
<setting label="32513" type="lsep" />
|
||||
</category>
|
||||
|
||||
<category label="32100">
|
||||
<setting label="32111" type="ipaddress" id="sickgear_ip" default="127.0.0.1" />
|
||||
<setting label="32112" type="number" id="sickgear_port" default="8081" />
|
||||
|
||||
<setting label="32121" type="ipaddress" id="kodi_ip" default="127.0.0.1" />
|
||||
<setting label="32122" type="number" id="kodi_port" default="9090" />
|
||||
</category>
|
||||
</settings>
|
|
@ -0,0 +1,361 @@
|
|||
# coding=utf-8
|
||||
#
|
||||
# This file is part of SickGear.
|
||||
#
|
||||
# SickGear is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# SickGear is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
try:
|
||||
import json as json
|
||||
except (StandardError, Exception):
|
||||
import simplejson as json
|
||||
from os import path, sep
|
||||
import datetime
|
||||
import socket
|
||||
import time
|
||||
import traceback
|
||||
import urllib
|
||||
import urllib2
|
||||
import xbmc
|
||||
import xbmcaddon
|
||||
import xbmcgui
|
||||
import xbmcvfs
|
||||
|
||||
|
||||
class SickGearWatchedStateUpdater:
|
||||
|
||||
def __init__(self):
|
||||
self.wait_onstartup = 4000
|
||||
|
||||
icon_size = '%s'
|
||||
try:
|
||||
if 1350 > xbmcgui.Window.getWidth(xbmcgui.Window()):
|
||||
icon_size += '-sm'
|
||||
except (StandardError, Exception):
|
||||
pass
|
||||
icon = 'special://home/addons/service.sickgear.watchedstate.updater/resources/icon-%s.png' % icon_size
|
||||
|
||||
self.addon = xbmcaddon.Addon()
|
||||
self.red_logo = icon % 'red'
|
||||
self.green_logo = icon % 'green'
|
||||
self.black_logo = icon % 'black'
|
||||
self.addon_name = self.addon.getAddonInfo('name')
|
||||
self.kodi_ip = self.addon.getSetting('kodi_ip')
|
||||
self.kodi_port = int(self.addon.getSetting('kodi_port'))
|
||||
|
||||
self.kodi_events = None
|
||||
self.sock_kodi = None
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Main start
|
||||
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
|
||||
if not self.enable_kodi_allow_remote():
|
||||
return
|
||||
|
||||
self.sock_kodi = socket.socket()
|
||||
self.sock_kodi.setblocking(True)
|
||||
xbmc.sleep(self.wait_onstartup)
|
||||
try:
|
||||
self.sock_kodi.connect((self.kodi_ip, self.kodi_port))
|
||||
except (StandardError, Exception) as e:
|
||||
return self.report_contact_fail(e)
|
||||
|
||||
self.log('Started')
|
||||
self.notify('Started in background')
|
||||
|
||||
self.kodi_events = xbmc.Monitor()
|
||||
|
||||
sock_buffer, depth, methods, method = '', 0, {'VideoLibrary.OnUpdate': self.video_library_on_update}, None
|
||||
|
||||
# socks listener parsing Kodi json output into action to perform
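# the socket is read one byte at a time and brace depth is counted so that each
# complete top-level JSON object from Kodi is decoded and dispatched to a handler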
|
||||
while not self.kodi_events.abortRequested():
|
||||
chunk = self.sock_kodi.recv(1)
|
||||
sock_buffer += chunk
|
||||
if chunk in '{}':
|
||||
if '{' == chunk:
|
||||
depth += 1
|
||||
else:
|
||||
depth -= 1
|
||||
if not depth:
|
||||
json_msg = json.loads(sock_buffer)
|
||||
try:
|
||||
method = json_msg.get('method')
|
||||
method_handler = methods[method]
|
||||
method_handler(json_msg)
|
||||
except KeyError:
|
||||
if 'System.OnQuit' == method:
|
||||
break
|
||||
if __dev__:
|
||||
self.log('pass on event: %s' % json_msg.get('method'))
|
||||
|
||||
sock_buffer = ''
|
||||
|
||||
self.sock_kodi.close()
|
||||
del self.kodi_events
|
||||
self.log('Stopped')
|
||||
|
||||
def is_enabled(self, name):
|
||||
"""
|
||||
Return state of an Add-on setting as Boolean
|
||||
|
||||
:param name: Name of Addon setting
|
||||
:type name: String
|
||||
:return: Success as True if addon setting is enabled, else False
|
||||
:rtype: Bool
|
||||
"""
|
||||
return 'true' == self.addon.getSetting(name)
|
||||
|
||||
def log(self, msg, error=False):
|
||||
"""
|
||||
Add a message to the Kodi logging system (provided setting allows it)
|
||||
|
||||
:param msg: Text to add to log file
|
||||
:type msg: String
|
||||
:param error: Specify whether text indicates an error or action
|
||||
:type error: Boolean
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
if self.is_enabled('verbose_log'):
|
||||
xbmc.log('[%s]:: %s' % (self.addon_name, msg), (xbmc.LOGNOTICE, xbmc.LOGERROR)[error])
|
||||
|
||||
def notify(self, msg, period=4, error=None):
|
||||
"""
|
||||
Invoke the Kodi onscreen notification panel with a message (provided setting allows it)
|
||||
|
||||
:param msg: Text to display in panel
|
||||
:type msg: String
|
||||
:param period: Wait seconds before closing dialog
|
||||
:type period: Integer
|
||||
:param error: Specify whether text indicates an error or action
|
||||
:type error: Boolean
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
if not error and self.is_enabled('action_notification') or (error and self.is_enabled('error_notification')):
|
||||
xbmc.executebuiltin('Notification(%s, "%s", %s, %s)' % (
|
||||
self.addon_name, msg, 1000 * period,
|
||||
((self.green_logo, self.red_logo)[any([error])], self.black_logo)[None is error]))
|
||||
|
||||
@staticmethod
|
||||
def ex(e):
|
||||
return '\n'.join(['\nEXCEPTION Raised: --> Python callback/script returned the following error <--',
|
||||
'Error type: <type \'{0}\'>',
|
||||
'Error content: {1!r}',
|
||||
'{2}',
|
||||
'--> End of Python script error report <--\n'
|
||||
]).format(type(e).__name__, e.args, traceback.format_exc())
|
||||
|
||||
def report_contact_fail(self, e):
|
||||
msg = 'Failed to contact Kodi at %s:%s' % (self.kodi_ip, self.kodi_port)
|
||||
self.log('%s %s' % (msg, self.ex(e)), error=True)
|
||||
self.notify(msg, period=20, error=True)
|
||||
|
||||
def kodi_request(self, params):
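# thin wrapper over xbmc.executeJSONRPC: stamp the request with the JSON-RPC
# version and an id, then return the decoded JSON response (or report a
# contact failure if the call raises)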
|
||||
params.update(dict(jsonrpc='2.0', id='SickGear'))
|
||||
try:
|
||||
response = xbmc.executeJSONRPC(json.dumps(params))
|
||||
except (StandardError, Exception) as e:
|
||||
return self.report_contact_fail(e)
|
||||
try:
|
||||
return json.loads(response)
|
||||
except UnicodeDecodeError:
|
||||
return json.loads(response.decode('utf-8', 'ignore'))
|
||||
|
||||
def video_library_on_update(self, json_msg):
|
||||
"""
|
||||
Actions to perform for: Kodi Notifications / VideoLibrary/ VideoLibrary.OnUpdate
|
||||
invoked in Kodi when: A video item has been updated
|
||||
source: http://kodi.wiki/view/JSON-RPC_API/v8#VideoLibrary.OnUpdate
|
||||
|
||||
:param json_msg: A JSON parsed from socks
|
||||
:type json_msg: String
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
try:
|
||||
# note: this is called multiple times when a season is marked as un-/watched
|
||||
if 'episode' == json_msg['params']['data']['item']['type']:
|
||||
media_id = json_msg['params']['data']['item']['id']
|
||||
play_count = json_msg['params']['data']['playcount']
|
||||
|
||||
json_resp = self.kodi_request(dict(
|
||||
method='Profiles.GetCurrentProfile'))
|
||||
current_profile = json_resp['result']['label']
|
||||
|
||||
json_resp = self.kodi_request(dict(
|
||||
method='VideoLibrary.GetEpisodeDetails',
|
||||
params=dict(episodeid=media_id, properties=['file'])))
|
||||
path_file = json_resp['result']['episodedetails']['file'].encode('utf-8')
|
||||
|
||||
self.update_sickgear(media_id, path_file, play_count, current_profile)
|
||||
except (StandardError, Exception):
|
||||
pass
|
||||
|
||||
def update_sickgear(self, media_id, path_file, play_count, profile):
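# push the watched state of one episode to SickGear; the data is buffered via
# payload_prep() and POSTed as a urlencoded 'payload' form field to the
# /update_watched_state_kodi/ endpoint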
|
||||
|
||||
self.notify('Update sent to SickGear')
|
||||
|
||||
url = 'http://%s:%s/update_watched_state_kodi/' % (
|
||||
self.addon.getSetting('sickgear_ip'), self.addon.getSetting('sickgear_port'))
|
||||
self.log('Notify state to %s with path_file=%s' % (url, path_file))
|
||||
|
||||
msg_bad = 'Failed to contact SickGear on port %s at %s' % (
|
||||
self.addon.getSetting('sickgear_port'), self.addon.getSetting('sickgear_ip'))
|
||||
|
||||
payload_json = self.payload_prep(dict(media_id=media_id, path_file=path_file, played=play_count, label=profile))
|
||||
if payload_json:
|
||||
payload = urllib.urlencode(dict(payload=payload_json))
|
||||
try:
|
||||
rq = urllib2.Request(url, data=payload)
|
||||
r = urllib2.urlopen(rq)
|
||||
response = json.load(r)
|
||||
r.close()
|
||||
if 'OK' == r.msg:
|
||||
self.payload_prep(response)
|
||||
if not all(response.values()):
|
||||
msg = 'Success, watched state updated'
|
||||
else:
|
||||
msg = 'Success, %s/%s watched states updated' % (
|
||||
len([v for v in response.values() if v]), len(response.values()))
|
||||
self.log(msg)
|
||||
self.notify(msg, error=False)
|
||||
else:
|
||||
msg_bad = 'Failed to update watched state'
|
||||
self.log(msg_bad)
|
||||
self.notify(msg_bad, error=True)
|
||||
except (urllib2.URLError, IOError) as e:
|
||||
self.log(u'Couldn\'t contact SickGear %s' % self.ex(e), error=True)
|
||||
self.notify(msg_bad, error=True, period=15)
|
||||
except (StandardError, Exception) as e:
|
||||
self.log(u'Couldn\'t contact SickGear %s' % self.ex(e), error=True)
|
||||
self.notify(msg_bad, error=True, period=15)
|
||||
|
||||
@staticmethod
|
||||
def payload_prep(payload):
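# maintain a small JSON buffer file (sickgear_buffer.txt) of pending watched
# state updates: entries acknowledged by SickGear are dropped, failed entries
# are kept for retry, and the pool is capped at 50 items before a new one is added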
|
||||
|
||||
name = 'sickgear_buffer.txt'
|
||||
# try to locate /temp at parent location
|
||||
path_temp = path.join(path.dirname(path.dirname(path.realpath(__file__))), 'temp')
|
||||
path_data = path.join(path_temp, name)
|
||||
|
||||
data_pool = {}
|
||||
if xbmcvfs.exists(path_data):
|
||||
fh = None
|
||||
try:
|
||||
fh = xbmcvfs.File(path_data)
|
||||
data_pool = json.load(fh)
|
||||
except (StandardError, Exception):
|
||||
pass
|
||||
fh and fh.close()
|
||||
|
||||
temp_ok = True
|
||||
if not any([data_pool]):
|
||||
temp_ok = xbmcvfs.exists(path_temp) or xbmcvfs.exists(path.join(path_temp, sep))
|
||||
if not temp_ok:
|
||||
temp_ok = xbmcvfs.mkdirs(path_temp)
|
||||
|
||||
response_data = False
|
||||
for k, v in payload.items():
|
||||
if response_data or k in data_pool:
|
||||
response_data = True
|
||||
if not v:
|
||||
# whether no fail response or bad input, remove this from data
|
||||
data_pool.pop(k)
|
||||
elif isinstance(v, basestring):
|
||||
# error so retry next time
|
||||
continue
|
||||
if not response_data:
|
||||
ts_now = time.mktime(datetime.datetime.now().timetuple())
|
||||
timeout = 100
|
||||
while ts_now in data_pool and timeout:
|
||||
ts_now = time.mktime(datetime.datetime.now().timetuple())
|
||||
timeout -= 1
|
||||
|
||||
max_payload = 50-1
|
||||
for k in list(data_pool.keys())[max_payload:]:
|
||||
data_pool.pop(k)
|
||||
payload.update(dict(date_watched=ts_now))
|
||||
data_pool.update({ts_now: payload})
|
||||
|
||||
output = json.dumps(data_pool)
|
||||
if temp_ok:
|
||||
fh = None
|
||||
try:
|
||||
fh = xbmcvfs.File(path_data, 'w')
|
||||
fh.write(output)
|
||||
except (StandardError, Exception):
|
||||
pass
|
||||
fh and fh.close()
|
||||
|
||||
return output
|
||||
|
||||
def enable_kodi_allow_remote(self):
|
||||
try:
|
||||
# setting esenabled: allow remote control by programs on this system
|
||||
# setting esallinterfaces: allow remote control by programs on other systems
|
||||
settings = [dict(esenabled=True), dict(esallinterfaces=True)]
|
||||
for setting in settings:
|
||||
if not self.kodi_request(dict(
|
||||
method='Settings.SetSettingValue',
|
||||
params=dict(setting='services.%s' % setting.keys()[0], value=setting.values()[0])
|
||||
)).get('result', {}):
|
||||
setting[setting.keys()[0]] = self.kodi_request(dict(
|
||||
method='Settings.GetSettingValue',
|
||||
params=dict(setting='services.%s' % setting.keys()[0])
|
||||
)).get('result', {}).get('value')
|
||||
except (StandardError, Exception):
|
||||
return
|
||||
|
||||
setting_states = [setting.values()[0] for setting in settings]
|
||||
if not all(setting_states):
|
||||
if not (any(setting_states)):
|
||||
msg = 'Please enable *all* Kodi settings to allow remote control by programs...'
|
||||
else:
|
||||
msg = 'Please enable Kodi setting to allow remote control by programs on other systems'
|
||||
msg = 'Failed startup. %s in system service/remote control' % msg
|
||||
self.log(msg, error=True)
|
||||
self.notify(msg, period=20, error=True)
|
||||
return
|
||||
return True
|
||||
|
||||
|
||||
__dev__ = True
|
||||
if __dev__:
|
||||
try:
|
||||
# noinspection PyProtectedMember
|
||||
import _devenv as devenv
|
||||
except ImportError:
|
||||
__dev__ = False
|
||||
|
||||
|
||||
if 1 < len(sys.argv):
|
||||
if __dev__:
|
||||
devenv.setup_devenv(False)
|
||||
if sys.argv[2].endswith('send_all'):
|
||||
print('>>>>>> TESTTESTTEST')
|
||||
|
||||
elif __name__ == '__main__':
|
||||
if __dev__:
|
||||
devenv.setup_devenv(True)
|
||||
WSU = SickGearWatchedStateUpdater()
|
||||
WSU.run()
|
||||
del WSU
|
||||
|
||||
if __dev__:
|
||||
devenv.stop()
|
|
@ -41,19 +41,6 @@ cpu_presets = {'DISABLED': 0, 'LOW': 0.01, 'NORMAL': 0.05, 'HIGH': 0.1}
|
|||
MULTI_EP_RESULT = -1
|
||||
SEASON_RESULT = -2
|
||||
|
||||
# Notification Types
|
||||
NOTIFY_SNATCH = 1
|
||||
NOTIFY_DOWNLOAD = 2
|
||||
NOTIFY_SUBTITLE_DOWNLOAD = 3
|
||||
NOTIFY_GIT_UPDATE = 4
|
||||
NOTIFY_GIT_UPDATE_TEXT = 5
|
||||
|
||||
notifyStrings = {NOTIFY_SNATCH: 'Started Download',
|
||||
NOTIFY_DOWNLOAD: 'Download Finished',
|
||||
NOTIFY_SUBTITLE_DOWNLOAD: 'Subtitle Download Finished',
|
||||
NOTIFY_GIT_UPDATE: 'SickGear Updated',
|
||||
NOTIFY_GIT_UPDATE_TEXT: 'SickGear Updated To Commit#: '}
|
||||
|
||||
# Episode statuses
|
||||
UNKNOWN = -1 # should never happen
|
||||
UNAIRED = 1 # episodes that haven't aired yet
|
||||
|
|
|
@ -49,7 +49,7 @@ naming_sep_type = (' - ', ' ')
|
|||
naming_sep_type_text = (' - ', 'space')
|
||||
|
||||
|
||||
def change_HTTPS_CERT(https_cert):
|
||||
def change_https_cert(https_cert):
|
||||
if https_cert == '':
|
||||
sickbeard.HTTPS_CERT = ''
|
||||
return True
|
||||
|
@ -64,7 +64,7 @@ def change_HTTPS_CERT(https_cert):
|
|||
return True
|
||||
|
||||
|
||||
def change_HTTPS_KEY(https_key):
|
||||
def change_https_key(https_key):
|
||||
if https_key == '':
|
||||
sickbeard.HTTPS_KEY = ''
|
||||
return True
|
||||
|
@ -79,7 +79,7 @@ def change_HTTPS_KEY(https_key):
|
|||
return True
|
||||
|
||||
|
||||
def change_LOG_DIR(log_dir, web_log):
|
||||
def change_log_dir(log_dir, web_log):
|
||||
log_dir_changed = False
|
||||
abs_log_dir = os.path.normpath(os.path.join(sickbeard.DATA_DIR, log_dir))
|
||||
web_log_value = checkbox_to_value(web_log)
|
||||
|
@ -102,7 +102,7 @@ def change_LOG_DIR(log_dir, web_log):
|
|||
return True
|
||||
|
||||
|
||||
def change_NZB_DIR(nzb_dir):
|
||||
def change_nzb_dir(nzb_dir):
|
||||
if nzb_dir == '':
|
||||
sickbeard.NZB_DIR = ''
|
||||
return True
|
||||
|
@ -117,7 +117,7 @@ def change_NZB_DIR(nzb_dir):
|
|||
return True
|
||||
|
||||
|
||||
def change_TORRENT_DIR(torrent_dir):
|
||||
def change_torrent_dir(torrent_dir):
|
||||
if torrent_dir == '':
|
||||
sickbeard.TORRENT_DIR = ''
|
||||
return True
|
||||
|
@ -132,7 +132,7 @@ def change_TORRENT_DIR(torrent_dir):
|
|||
return True
|
||||
|
||||
|
||||
def change_TV_DOWNLOAD_DIR(tv_download_dir):
|
||||
def change_tv_download_dir(tv_download_dir):
|
||||
if tv_download_dir == '':
|
||||
sickbeard.TV_DOWNLOAD_DIR = ''
|
||||
return True
|
||||
|
@ -147,16 +147,17 @@ def change_TV_DOWNLOAD_DIR(tv_download_dir):
|
|||
return True
|
||||
|
||||
|
||||
def change_AUTOPOSTPROCESSER_FREQUENCY(freq):
|
||||
def schedule_autopostprocesser(freq):
|
||||
sickbeard.AUTOPOSTPROCESSER_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_AUTOPOSTPROCESSER_FREQUENCY)
|
||||
|
||||
if sickbeard.AUTOPOSTPROCESSER_FREQUENCY < sickbeard.MIN_AUTOPOSTPROCESSER_FREQUENCY:
|
||||
sickbeard.AUTOPOSTPROCESSER_FREQUENCY = sickbeard.MIN_AUTOPOSTPROCESSER_FREQUENCY
|
||||
|
||||
sickbeard.autoPostProcesserScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.AUTOPOSTPROCESSER_FREQUENCY)
|
||||
sickbeard.autoPostProcesserScheduler.set_paused_state()
|
||||
|
||||
|
||||
def change_RECENTSEARCH_FREQUENCY(freq):
|
||||
def schedule_recentsearch(freq):
|
||||
sickbeard.RECENTSEARCH_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_RECENTSEARCH_FREQUENCY)
|
||||
|
||||
if sickbeard.RECENTSEARCH_FREQUENCY < sickbeard.MIN_RECENTSEARCH_FREQUENCY:
|
||||
|
@ -165,13 +166,14 @@ def change_RECENTSEARCH_FREQUENCY(freq):
|
|||
sickbeard.recentSearchScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.RECENTSEARCH_FREQUENCY)
|
||||
|
||||
|
||||
def change_BACKLOG_FREQUENCY(freq):
|
||||
sickbeard.BACKLOG_FREQUENCY = minimax(freq, sickbeard.DEFAULT_BACKLOG_FREQUENCY, sickbeard.MIN_BACKLOG_FREQUENCY, sickbeard.MAX_BACKLOG_FREQUENCY)
|
||||
def schedule_backlog(freq):
|
||||
sickbeard.BACKLOG_FREQUENCY = minimax(freq, sickbeard.DEFAULT_BACKLOG_FREQUENCY,
|
||||
sickbeard.MIN_BACKLOG_FREQUENCY, sickbeard.MAX_BACKLOG_FREQUENCY)
|
||||
|
||||
sickbeard.backlogSearchScheduler.action.cycleTime = sickbeard.BACKLOG_FREQUENCY
|
||||
|
||||
|
||||
def change_UPDATE_FREQUENCY(freq):
|
||||
def schedule_update(freq):
|
||||
sickbeard.UPDATE_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_UPDATE_FREQUENCY)
|
||||
|
||||
if sickbeard.UPDATE_FREQUENCY < sickbeard.MIN_UPDATE_FREQUENCY:
|
||||
|
@ -180,27 +182,25 @@ def change_UPDATE_FREQUENCY(freq):
|
|||
sickbeard.versionCheckScheduler.cycleTime = datetime.timedelta(hours=sickbeard.UPDATE_FREQUENCY)
|
||||
|
||||
|
||||
def change_VERSION_NOTIFY(version_notify):
|
||||
oldSetting = sickbeard.VERSION_NOTIFY
|
||||
def schedule_version_notify(version_notify):
|
||||
old_setting = sickbeard.VERSION_NOTIFY
|
||||
|
||||
sickbeard.VERSION_NOTIFY = version_notify
|
||||
|
||||
if not version_notify:
|
||||
sickbeard.NEWEST_VERSION_STRING = None
|
||||
|
||||
if not oldSetting and version_notify:
|
||||
if not old_setting and version_notify:
|
||||
sickbeard.versionCheckScheduler.action.run()
|
||||
|
||||
|
||||
def change_DOWNLOAD_PROPERS(download_propers):
|
||||
if sickbeard.DOWNLOAD_PROPERS == download_propers:
|
||||
return
|
||||
|
||||
def schedule_download_propers(download_propers):
|
||||
if sickbeard.DOWNLOAD_PROPERS != download_propers:
|
||||
sickbeard.DOWNLOAD_PROPERS = download_propers
|
||||
sickbeard.properFinderScheduler.check_paused()
|
||||
sickbeard.properFinderScheduler.set_paused_state()
|
||||
|
||||
|
||||
def change_USE_TRAKT(use_trakt):
|
||||
def schedule_trakt(use_trakt):
|
||||
if sickbeard.USE_TRAKT == use_trakt:
|
||||
return
|
||||
|
||||
|
@ -216,22 +216,40 @@ def change_USE_TRAKT(use_trakt):
|
|||
# pass
|
||||
|
||||
|
||||
def change_USE_SUBTITLES(use_subtitles):
|
||||
if sickbeard.USE_SUBTITLES == use_subtitles:
|
||||
return
|
||||
|
||||
def schedule_subtitles(use_subtitles):
|
||||
if sickbeard.USE_SUBTITLES != use_subtitles:
|
||||
sickbeard.USE_SUBTITLES = use_subtitles
|
||||
sickbeard.subtitlesFinderScheduler.check_paused()
|
||||
sickbeard.subtitlesFinderScheduler.set_paused_state()
|
||||
|
||||
|
||||
def CheckSection(CFG, sec):
|
||||
def schedule_emby_watched(emby_watched_interval):
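# an interval of 0 disables the Emby watched state scheduler; other values are
# clamped via minimax() to at most MAX_WATCHEDSTATE_FREQUENCY before the
# scheduler cycle time is updated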
|
||||
emby_watched_freq = minimax(emby_watched_interval, sickbeard.DEFAULT_WATCHEDSTATE_FREQUENCY,
|
||||
0, sickbeard.MAX_WATCHEDSTATE_FREQUENCY)
|
||||
if emby_watched_freq and emby_watched_freq != sickbeard.EMBY_WATCHEDSTATE_FREQUENCY:
|
||||
sickbeard.EMBY_WATCHEDSTATE_FREQUENCY = emby_watched_freq
|
||||
sickbeard.embyWatchedStateScheduler.cycleTime = datetime.timedelta(minutes=emby_watched_freq)
|
||||
|
||||
sickbeard.EMBY_WATCHEDSTATE_SCHEDULED = bool(emby_watched_freq)
|
||||
sickbeard.embyWatchedStateScheduler.set_paused_state()
|
||||
|
||||
|
||||
def schedule_plex_watched(plex_watched_interval):
|
||||
plex_watched_freq = minimax(plex_watched_interval, sickbeard.DEFAULT_WATCHEDSTATE_FREQUENCY,
|
||||
0, sickbeard.MAX_WATCHEDSTATE_FREQUENCY)
|
||||
if plex_watched_freq and plex_watched_freq != sickbeard.PLEX_WATCHEDSTATE_FREQUENCY:
|
||||
sickbeard.PLEX_WATCHEDSTATE_FREQUENCY = plex_watched_freq
|
||||
sickbeard.plexWatchedStateScheduler.cycleTime = datetime.timedelta(minutes=plex_watched_freq)
|
||||
|
||||
sickbeard.PLEX_WATCHEDSTATE_SCHEDULED = bool(plex_watched_freq)
|
||||
sickbeard.plexWatchedStateScheduler.set_paused_state()
|
||||
|
||||
|
||||
def check_section(cfg, section):
|
||||
""" Check if INI section exists, if not create it """
|
||||
try:
|
||||
CFG[sec]
|
||||
return True
|
||||
except:
|
||||
CFG[sec] = {}
|
||||
if section not in cfg:
|
||||
cfg[section] = {}
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def checkbox_to_value(option, value_on=1, value_off=0):
|
||||
|
@ -284,7 +302,7 @@ def clean_host(host, default_port=None):
|
|||
def clean_hosts(hosts, default_port=None):
|
||||
cleaned_hosts = []
|
||||
|
||||
for cur_host in [x.strip() for x in hosts.split(',')]:
|
||||
for cur_host in [host.strip() for host in hosts.split(',')]:
|
||||
if cur_host:
|
||||
cleaned_host = clean_host(cur_host, default_port)
|
||||
if cleaned_host:
|
||||
|
@ -329,7 +347,7 @@ def to_int(val, default=0):
|
|||
|
||||
try:
|
||||
val = int(val)
|
||||
except:
|
||||
except(StandardError, Exception):
|
||||
val = default
|
||||
|
||||
return val
|
||||
|
@ -351,11 +369,11 @@ def minimax(val, default, low, high):
|
|||
def check_setting_int(config, cfg_name, item_name, def_val):
|
||||
try:
|
||||
my_val = int(config[cfg_name][item_name])
|
||||
except:
|
||||
except(StandardError, Exception):
|
||||
my_val = def_val
|
||||
try:
|
||||
config[cfg_name][item_name] = my_val
|
||||
except:
|
||||
except(StandardError, Exception):
|
||||
config[cfg_name] = {}
|
||||
config[cfg_name][item_name] = my_val
|
||||
logger.log('%s -> %s' % (item_name, my_val), logger.DEBUG)
|
||||
|
@ -365,11 +383,11 @@ def check_setting_int(config, cfg_name, item_name, def_val):
|
|||
def check_setting_float(config, cfg_name, item_name, def_val):
|
||||
try:
|
||||
my_val = float(config[cfg_name][item_name])
|
||||
except:
|
||||
except(StandardError, Exception):
|
||||
my_val = def_val
|
||||
try:
|
||||
config[cfg_name][item_name] = my_val
|
||||
except:
|
||||
except(StandardError, Exception):
|
||||
config[cfg_name] = {}
|
||||
config[cfg_name][item_name] = my_val
|
||||
|
||||
|
@ -391,11 +409,11 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
|
|||
|
||||
try:
|
||||
my_val = helpers.decrypt(config[cfg_name][item_name], encryption_version)
|
||||
except:
|
||||
except(StandardError, Exception):
|
||||
my_val = def_val
|
||||
try:
|
||||
config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version)
|
||||
except:
|
||||
except(StandardError, Exception):
|
||||
config[cfg_name] = {}
|
||||
config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version)
|
||||
|
||||
|
@ -407,7 +425,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
|
|||
return (my_val, def_val)['None' == my_val]
|
||||
|
||||
|
||||
class ConfigMigrator():
|
||||
class ConfigMigrator:
|
||||
def __init__(self, config_obj):
|
||||
"""
|
||||
Initializes a config migrator that can take the config from the version indicated in the config
|
||||
|
@ -491,8 +509,8 @@ class ConfigMigrator():
|
|||
sickbeard.NAMING_MULTI_EP = int(check_setting_int(self.config_obj, 'General', 'naming_multi_ep_type', 1))
|
||||
|
||||
# see if any of their shows used season folders
|
||||
myDB = db.DBConnection()
|
||||
season_folder_shows = myDB.select('SELECT * FROM tv_shows WHERE flatten_folders = 0')
|
||||
my_db = db.DBConnection()
|
||||
season_folder_shows = my_db.select('SELECT * FROM tv_shows WHERE flatten_folders = 0')
|
||||
|
||||
# if any shows had season folders on then prepend season folder to the pattern
|
||||
if season_folder_shows:
|
||||
|
@ -518,7 +536,7 @@ class ConfigMigrator():
|
|||
logger.log(u'No shows were using season folders before so I am disabling flattening on all shows')
|
||||
|
||||
# don't flatten any shows at all
|
||||
myDB.action('UPDATE tv_shows SET flatten_folders = 0')
|
||||
my_db.action('UPDATE tv_shows SET flatten_folders = 0')
|
||||
|
||||
sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
|
||||
|
||||
|
@ -534,11 +552,11 @@ class ConfigMigrator():
|
|||
use_ep_name = bool(check_setting_int(self.config_obj, 'General', 'naming_ep_name', 1))
|
||||
|
||||
# make the presets into templates
|
||||
naming_ep_type = ('%Sx%0E',
|
||||
naming_ep_tmpl = ('%Sx%0E',
|
||||
's%0Se%0E',
|
||||
'S%0SE%0E',
|
||||
'%0Sx%0E')
|
||||
naming_sep_type = (' - ', ' ')
|
||||
naming_sep_tmpl = (' - ', ' ')
|
||||
|
||||
# set up our data to use
|
||||
if use_periods:
|
||||
|
@ -555,29 +573,29 @@ class ConfigMigrator():
|
|||
if abd:
|
||||
ep_string = abd_string
|
||||
else:
|
||||
ep_string = naming_ep_type[ep_type]
|
||||
ep_string = naming_ep_tmpl[ep_type]
|
||||
|
||||
finalName = ''
|
||||
final_name = ''
|
||||
|
||||
# start with the show name
|
||||
if use_show_name:
|
||||
finalName += show_name + naming_sep_type[sep_type]
|
||||
final_name += show_name + naming_sep_tmpl[sep_type]
|
||||
|
||||
# add the season/ep stuff
|
||||
finalName += ep_string
|
||||
final_name += ep_string
|
||||
|
||||
# add the episode name
|
||||
if use_ep_name:
|
||||
finalName += naming_sep_type[sep_type] + ep_name
|
||||
final_name += naming_sep_tmpl[sep_type] + ep_name
|
||||
|
||||
# add the quality
|
||||
if use_quality:
|
||||
finalName += naming_sep_type[sep_type] + ep_quality
|
||||
final_name += naming_sep_tmpl[sep_type] + ep_quality
|
||||
|
||||
if use_periods:
|
||||
finalName = re.sub('\s+', '.', finalName)
|
||||
final_name = re.sub('\s+', '.', final_name)
|
||||
|
||||
return finalName
|
||||
return final_name
|
||||
|
||||
# Migration v2: Dummy migration to sync backup number with config version number
|
||||
def _migrate_v2(self):
|
||||
|
@ -589,7 +607,8 @@ class ConfigMigrator():
|
|||
Reads in the old naming settings from your config and generates a new config template from them.
|
||||
"""
|
||||
# get the old settings from the file and store them in the new variable names
|
||||
for prov in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if curProvider.name == 'omgwtfnzbs']:
|
||||
for prov in [curProvider for curProvider in sickbeard.providers.sortedProviderList()
|
||||
if 'omgwtfnzbs' == curProvider.name]:
|
||||
prov.username = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_uid', '')
|
||||
prov.api_key = check_setting_str(self.config_obj, 'omgwtfnzbs', 'omgwtfnzbs_key', '')
|
||||
|
||||
|
@ -662,7 +681,7 @@ class ConfigMigrator():
|
|||
|
||||
use_banner = bool(check_setting_int(self.config_obj, 'General', 'use_banner', 0))
|
||||
|
||||
def _migrate_metadata(metadata, metadata_name, use_banner):
|
||||
def _migrate_metadata(metadata, metadata_name, banner):
|
||||
cur_metadata = metadata.split('|')
|
||||
# if target has the old number of values, do upgrade
|
||||
if len(cur_metadata) == 6:
|
||||
|
@ -673,9 +692,9 @@ class ConfigMigrator():
|
|||
cur_metadata.append('0')
|
||||
# swap show fanart, show poster
|
||||
cur_metadata[3], cur_metadata[2] = cur_metadata[2], cur_metadata[3]
|
||||
# if user was using use_banner to override the poster,
|
||||
# if user was using banner to override the poster,
|
||||
# instead enable the banner option and deactivate poster
|
||||
if metadata_name == 'XBMC' and use_banner:
|
||||
if 'XBMC' == metadata_name and banner:
|
||||
cur_metadata[4], cur_metadata[3] = cur_metadata[3], '0'
|
||||
# write new format
|
||||
metadata = '|'.join(cur_metadata)
|
||||
|
@ -723,7 +742,8 @@ class ConfigMigrator():
|
|||
check_setting_int(self.config_obj, 'GUI', 'coming_eps_display_paused', 0))
|
||||
sickbeard.EPISODE_VIEW_MISSED_RANGE = check_setting_int(self.config_obj, 'GUI', 'coming_eps_missed_range', 7)
|
||||
|
||||
def _migrate_v8(self):
|
||||
@staticmethod
|
||||
def _migrate_v8():
|
||||
# removing settings from gui and making it a hidden debug option
|
||||
sickbeard.RECENTSEARCH_STARTUP = False
|
||||
|
||||
|
@ -731,7 +751,8 @@ class ConfigMigrator():
|
|||
sickbeard.PUSHBULLET_ACCESS_TOKEN = check_setting_str(self.config_obj, 'Pushbullet', 'pushbullet_api', '')
|
||||
sickbeard.PUSHBULLET_DEVICE_IDEN = check_setting_str(self.config_obj, 'Pushbullet', 'pushbullet_device', '')
|
||||
|
||||
def _migrate_v10(self):
|
||||
@staticmethod
|
||||
def _migrate_v10():
|
||||
# reset backlog frequency to default
|
||||
sickbeard.BACKLOG_FREQUENCY = sickbeard.DEFAULT_BACKLOG_FREQUENCY
|
||||
|
||||
|
@ -741,9 +762,11 @@ class ConfigMigrator():
|
|||
else:
|
||||
sickbeard.SHOWLIST_TAGVIEW = 'default'
|
||||
|
||||
def _migrate_v12(self):
|
||||
@staticmethod
|
||||
def _migrate_v12():
|
||||
# add words to ignore list and insert spaces to improve the ui config readability
|
||||
words_to_add = ['hevc', 'reenc', 'x265', 'danish', 'deutsch', 'flemish', 'italian', 'nordic', 'norwegian', 'portuguese', 'spanish', 'turkish']
|
||||
words_to_add = ['hevc', 'reenc', 'x265', 'danish', 'deutsch', 'flemish', 'italian',
|
||||
'nordic', 'norwegian', 'portuguese', 'spanish', 'turkish']
|
||||
config_words = sickbeard.IGNORE_WORDS.split(',')
|
||||
new_list = []
|
||||
for new_word in words_to_add:
|
||||
|
@ -759,7 +782,8 @@ class ConfigMigrator():
|
|||
|
||||
sickbeard.IGNORE_WORDS = ', '.join(sorted(new_list))
|
||||
|
||||
def _migrate_v13(self):
|
||||
@staticmethod
|
||||
def _migrate_v13():
|
||||
# change dereferrer.org urls to blank, but leave any other url untouched
|
||||
if sickbeard.ANON_REDIRECT == 'http://dereferer.org/?':
|
||||
sickbeard.ANON_REDIRECT = ''
|
||||
|
|
|
@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
|
|||
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
|
||||
|
||||
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
||||
MAX_DB_VERSION = 20008
|
||||
MAX_DB_VERSION = 20009
|
||||
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
|
||||
|
||||
|
||||
|
@ -44,12 +44,13 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
def fix_duplicate_shows(self, column='indexer_id'):
|
||||
|
||||
sql_results = self.connection.select(
|
||||
'SELECT show_id, ' + column + ', COUNT(' + column + ') as count FROM tv_shows GROUP BY ' + column + ' HAVING count > 1')
|
||||
'SELECT show_id, %(col)s, COUNT(%(col)s) as count FROM tv_shows GROUP BY %(col)s HAVING count > 1'
|
||||
% {'col': column})
|
||||
|
||||
for cur_duplicate in sql_results:
|
||||
|
||||
logger.log(u'Duplicate show detected! %s: %s count: %s' % (column, cur_duplicate[column],
|
||||
cur_duplicate['count']), logger.DEBUG)
|
||||
logger.log(u'Duplicate show detected! %s: %s count: %s' % (
|
||||
column, cur_duplicate[column], cur_duplicate['count']), logger.DEBUG)
|
||||
|
||||
cur_dupe_results = self.connection.select(
|
||||
'SELECT show_id, ' + column + ' FROM tv_shows WHERE ' + column + ' = ? LIMIT ?',
|
||||
|
@ -58,9 +59,8 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
|
||||
cl = []
|
||||
for cur_dupe_id in cur_dupe_results:
|
||||
logger.log(
|
||||
u'Deleting duplicate show with %s: %s show_id: %s' % (column, cur_dupe_id[column],
|
||||
cur_dupe_id['show_id']))
|
||||
logger.log(u'Deleting duplicate show with %s: %s show_id: %s' % (
|
||||
column, cur_dupe_id[column], cur_dupe_id['show_id']))
|
||||
cl.append(['DELETE FROM tv_shows WHERE show_id = ?', [cur_dupe_id['show_id']]])
|
||||
|
||||
if 0 < len(cl):
|
||||
|
@ -72,7 +72,10 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
def fix_duplicate_episodes(self):
|
||||
|
||||
sql_results = self.connection.select(
|
||||
'SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1')
|
||||
'SELECT showid, season, episode, COUNT(showid) as count'
|
||||
' FROM tv_episodes'
|
||||
' GROUP BY showid, season, episode'
|
||||
' HAVING count > 1')
|
||||
|
||||
for cur_duplicate in sql_results:
|
||||
|
||||
|
@ -81,7 +84,10 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
cur_duplicate['count']), logger.DEBUG)
|
||||
|
||||
cur_dupe_results = self.connection.select(
|
||||
'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?',
|
||||
'SELECT episode_id'
|
||||
' FROM tv_episodes'
|
||||
' WHERE showid = ? AND season = ? and episode = ?'
|
||||
' ORDER BY episode_id DESC LIMIT ?',
|
||||
[cur_duplicate['showid'], cur_duplicate['season'], cur_duplicate['episode'],
|
||||
int(cur_duplicate['count']) - 1]
|
||||
)
|
||||
|
@ -100,12 +106,15 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
def fix_orphan_episodes(self):
|
||||
|
||||
sql_results = self.connection.select(
|
||||
'SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL')
|
||||
'SELECT episode_id, showid, tv_shows.indexer_id'
|
||||
' FROM tv_episodes'
|
||||
' LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id'
|
||||
' WHERE tv_shows.indexer_id is NULL')
|
||||
|
||||
cl = []
|
||||
for cur_orphan in sql_results:
|
||||
logger.log(u'Orphan episode detected! episode_id: %s showid: %s' % (cur_orphan['episode_id'],
|
||||
cur_orphan['showid']), logger.DEBUG)
|
||||
logger.log(u'Orphan episode detected! episode_id: %s showid: %s' % (
|
||||
cur_orphan['episode_id'], cur_orphan['showid']), logger.DEBUG)
|
||||
logger.log(u'Deleting orphan episode with episode_id: %s' % cur_orphan['episode_id'])
|
||||
cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_orphan['episode_id']]])
|
||||
|
||||
|
@ -150,12 +159,12 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
|
||||
sql_results = self.connection.select(
|
||||
'SELECT episode_id, showid FROM tv_episodes WHERE status = ? or ( airdate > ? AND status in (?,?) ) or '
|
||||
'( airdate <= 1 AND status = ? )', ['', cur_date.toordinal(), common.SKIPPED, common.WANTED, common.WANTED])
|
||||
'(airdate <= 1 AND status = ? )', ['', cur_date.toordinal(), common.SKIPPED, common.WANTED, common.WANTED])
|
||||
|
||||
cl = []
|
||||
for cur_unaired in sql_results:
|
||||
logger.log(u'UNAIRED episode detected! episode_id: %s showid: %s' % (cur_unaired['episode_id'],
|
||||
cur_unaired['showid']), logger.DEBUG)
|
||||
logger.log(u'UNAIRED episode detected! episode_id: %s showid: %s' % (
|
||||
cur_unaired['episode_id'], cur_unaired['showid']), logger.DEBUG)
|
||||
logger.log(u'Fixing unaired episode status with episode_id: %s' % cur_unaired['episode_id'])
|
||||
cl.append(['UPDATE tv_episodes SET status = ? WHERE episode_id = ?',
|
||||
[common.UNAIRED, cur_unaired['episode_id']]])
|
||||
|
@ -176,42 +185,90 @@ class MainSanityCheck(db.DBSanityCheck):
|
|||
self.connection.action('UPDATE scene_exceptions SET season = -1 WHERE season = "null"')
|
||||
|
||||
def fix_orphan_not_found_show(self):
|
||||
sql_result = self.connection.action('DELETE FROM tv_shows_not_found WHERE NOT EXISTS (SELECT NULL FROM '
|
||||
'tv_shows WHERE tv_shows_not_found.indexer == tv_shows.indexer AND '
|
||||
'tv_shows_not_found.indexer_id == tv_shows.indexer_id)')
|
||||
sql_result = self.connection.action(
|
||||
'DELETE FROM tv_shows_not_found'
|
||||
' WHERE NOT EXISTS (SELECT NULL FROM tv_shows WHERE tv_shows_not_found.indexer == tv_shows.indexer AND'
|
||||
' tv_shows_not_found.indexer_id == tv_shows.indexer_id)')
|
||||
if sql_result.rowcount:
|
||||
logger.log('Fixed orphaned not found shows')
|
||||
|
||||
# ======================
|
||||
# = Main DB Migrations =
|
||||
# ======================
|
||||
# Add new migrations at the bottom of the list; subclass the previous migration.
|
||||
# 0 -> 20003
|
||||
|
||||
class InitialSchema(db.SchemaUpgrade):
|
||||
# ======================
|
||||
# = Main DB Migrations =
|
||||
# ======================
|
||||
# Add new migrations at the bottom of the list; subclass the previous migration.
|
||||
# 0 -> 20008
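# illustrative (hypothetical) example of that convention, not part of this change:
#   class AddSomeColumn(PreviousMigration):
#       def execute(self):
#           ...add the column, then self.incDBVersion()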
|
||||
def execute(self):
|
||||
db.backup_database('sickbeard.db', self.checkDBVersion())
|
||||
|
||||
if not self.hasTable('tv_shows') and not self.hasTable('db_version'):
|
||||
queries = [
|
||||
# original sick beard tables
|
||||
'CREATE TABLE db_version (db_version INTEGER);',
|
||||
'CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT, version NUMERIC)',
|
||||
'CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)',
|
||||
'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC, release_group TEXT, trakt_watched NUMERIC)',
|
||||
'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, overview TEXT, tag TEXT)',
|
||||
'CREATE INDEX idx_showid ON tv_episodes (showid)',
|
||||
'CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes (showid,airdate)',
|
||||
# blacklist
|
||||
'CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)',
|
||||
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))',
|
||||
'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)',
|
||||
'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, absolute_number NUMERIC, scene_absolute_number NUMERIC, PRIMARY KEY (indexer_id, season, episode))',
|
||||
'CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)',
|
||||
'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)',
|
||||
# db_version
|
||||
'CREATE TABLE db_version (db_version INTEGER)',
|
||||
'INSERT INTO db_version (db_version) VALUES (20008)',
|
||||
# flags
|
||||
'CREATE TABLE flags (flag PRIMARY KEY NOT NULL)',
|
||||
# history
|
||||
'CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC,'
|
||||
' quality NUMERIC, resource TEXT, provider TEXT, version NUMERIC)',
|
||||
# imdb_info
|
||||
'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC,'
|
||||
' akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT,'
|
||||
' rating TEXT, votes INTEGER, last_update NUMERIC)',
|
||||
# indexer_mapping
|
||||
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL,'
|
||||
' mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0,'
|
||||
' PRIMARY KEY (indexer_id, indexer, mindexer))',
|
||||
'CREATE INDEX idx_mapping ON indexer_mapping (indexer_id, indexer)',
|
||||
# info
|
||||
'CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC,'
|
||||
' last_run_backlog NUMERIC NOT NULL DEFAULT 1)',
|
||||
# scene_exceptions
|
||||
'CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY,'
|
||||
' show_name TEXT, season NUMERIC, custom NUMERIC)',
|
||||
# scene_exceptions_refresh
|
||||
'CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)',
|
||||
# scene_numbering
|
||||
'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,'
|
||||
' scene_season INTEGER, scene_episode INTEGER, absolute_number NUMERIC, scene_absolute_number NUMERIC,'
|
||||
' PRIMARY KEY (indexer_id, season, episode))',
|
||||
# tv_episodes
|
||||
'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC,'
|
||||
' indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC,'
|
||||
' hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT,'
|
||||
' subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC,'
|
||||
' scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC,'
|
||||
' version NUMERIC, release_group TEXT, trakt_watched NUMERIC)',
|
||||
'CREATE INDEX idx_showid ON tv_episodes (showid)',
|
||||
'CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes (showid, airdate)',
|
||||
'CREATE INDEX idx_sta_epi_air ON tv_episodes (status, episode, airdate)',
|
||||
'CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, airdate)',
|
||||
'CREATE INDEX idx_status ON tv_episodes (status, season, episode, airdate)',
|
||||
# tv_episodes_watched
|
||||
'CREATE TABLE tv_episodes_watched (tvep_id NUMERIC NOT NULL, clientep_id TEXT, label TEXT,'
|
||||
' played NUMERIC DEFAULT 0 NOT NULL, date_watched NUMERIC NOT NULL, date_added NUMERIC,'
|
||||
' status NUMERIC, location TEXT, file_size NUMERIC, hide INT default 0 not null)',
|
||||
# tv_shows
|
||||
'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC,'
|
||||
' show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC,'
|
||||
' quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC,'
|
||||
' air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT,'
|
||||
' last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT,'
|
||||
' rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, overview TEXT, tag TEXT)',
|
||||
'CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id)',
|
||||
'CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)',
|
||||
'CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)',
|
||||
'CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)',
|
||||
'INSERT INTO db_version (db_version) VALUES (20003)'
|
||||
# tv_shows_not_found
|
||||
'CREATE TABLE tv_shows_not_found (indexer NUMERIC NOT NULL, indexer_id NUMERIC NOT NULL,'
|
||||
' fail_count NUMERIC NOT NULL DEFAULT 0, last_check NUMERIC NOT NULL, last_success NUMERIC,'
|
||||
' PRIMARY KEY (indexer_id, indexer))',
|
||||
# webdl_types
|
||||
'CREATE TABLE webdl_types (dname TEXT NOT NULL, regex TEXT NOT NULL)',
|
||||
# whitelist
|
||||
'CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)',
|
||||
# xem_refresh
|
||||
'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)',
|
||||
]
|
||||
for query in queries:
|
||||
self.connection.action(query)
|
||||
|
@ -220,19 +277,21 @@ class InitialSchema(db.SchemaUpgrade):
|
|||
cur_db_version = self.checkDBVersion()
|
||||
|
||||
if cur_db_version < MIN_DB_VERSION:
|
||||
logger.log_error_and_exit(u'Your database version ('
|
||||
+ str(cur_db_version)
|
||||
logger.log_error_and_exit(
|
||||
u'Your database version (' + str(cur_db_version)
|
||||
+ ') is too old to migrate from what this version of SickGear supports ('
|
||||
+ str(MIN_DB_VERSION) + ').' + "\n"
|
||||
+ 'Upgrade using a previous version (tag) build 496 to build 501 of SickGear first or remove database file to begin fresh.'
|
||||
+ 'Upgrade using a previous version (tag) build 496 to build 501 of SickGear'
|
||||
' first or remove database file to begin fresh.'
|
||||
)
|
||||
|
||||
if cur_db_version > MAX_DB_VERSION:
|
||||
logger.log_error_and_exit(u'Your database version ('
|
||||
+ str(cur_db_version)
|
||||
logger.log_error_and_exit(
|
||||
u'Your database version (' + str(cur_db_version)
|
||||
+ ') has been incremented past what this version of SickGear supports ('
|
||||
+ str(MAX_DB_VERSION) + ').' + "\n"
|
||||
+ 'If you have used other forks of SickGear, your database may be unusable due to their modifications.'
|
||||
+ str(MAX_DB_VERSION) + ').\n'
|
||||
+ 'If you have used other forks of SickGear,'
|
||||
' your database may be unusable due to their modifications.'
|
||||
)
|
||||
|
||||
return self.checkDBVersion()
|
||||
|
@ -270,7 +329,9 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
|
|||
for cur_result in history_results:
|
||||
# find the associated download, if there isn't one then ignore it
|
||||
download_results = self.connection.select(
|
||||
'SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?',
|
||||
'SELECT resource'
|
||||
' FROM history'
|
||||
' WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?',
|
||||
[cur_result['showid'], cur_result['season'], cur_result['episode'], cur_result['date']])
|
||||
if not download_results:
|
||||
logger.log(u'Found a snatch in the history for ' + cur_result[
|
||||
|
@ -286,7 +347,9 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
|
|||
|
||||
# find the associated episode on disk
|
||||
ep_results = self.connection.select(
|
||||
'SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ""',
|
||||
'SELECT episode_id, status'
|
||||
' FROM tv_episodes'
|
||||
' WHERE showid = ? AND season = ? AND episode = ? AND location != ""',
|
||||
[cur_result['showid'], cur_result['season'], cur_result['episode']])
|
||||
if not ep_results:
|
||||
logger.log(
|
||||
|
@ -358,8 +421,14 @@ class RenameSeasonFolders(db.SchemaUpgrade):
|
|||
# rename the column
|
||||
self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
|
||||
self.connection.action(
|
||||
'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)')
|
||||
sql = 'INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows'
|
||||
'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC,'
|
||||
' network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT,'
|
||||
' flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT,'
|
||||
' air_by_date NUMERIC, lang TEXT)')
|
||||
sql = 'INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime,' \
|
||||
' quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang)' \
|
||||
' SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status,' \
|
||||
' seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows'
|
||||
self.connection.action(sql)
|
||||
|
||||
# flip the values to be opposite of what they were before
|
||||
|
@ -419,13 +488,13 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
|
|||
return result
|
||||
|
||||
def _update_composite_qualities(self, status):
|
||||
'''
|
||||
"""
|
||||
Unpack, Update, Return new quality values
|
||||
|
||||
Unpack the composite archive/initial values.
|
||||
Update either quality if needed.
|
||||
Then return the new composite quality value.
|
||||
'''
|
||||
"""
|
||||
|
||||
best = (status & (0xffff << 16)) >> 16
|
||||
initial = status & 0xffff
|
||||
|
@ -449,7 +518,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
|
|||
new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL,
|
||||
common.Quality.HDBLURAY], [])
|
||||
|
||||
# update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
|
||||
# update ANY -- shift existing qualities and add new 1080p qualities,
|
||||
# note that rawHD was not added to the ANY template
|
||||
old_any = common.Quality.combineQualities(
|
||||
[common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
|
||||
common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
|
||||
|
@ -472,7 +542,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
|
|||
cl.append(['UPDATE tv_shows SET quality = ? WHERE show_id = ?', [new_quality, cur_show['show_id']]])
|
||||
self.connection.mass_action(cl)
|
||||
|
||||
# update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
|
||||
# update status that are within the old hdwebdl
|
||||
# (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
|
||||
logger.log(u'[2/4] Updating the status for the episodes within each show...', logger.MESSAGE)
|
||||
cl = []
|
||||
episodes = self.connection.select('SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800')
|
||||
|
@ -481,7 +552,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
|
|||
[self._update_status(cur_episode['status']), cur_episode['episode_id']]])
|
||||
self.connection.mass_action(cl)
|
||||
|
||||
# make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together
|
||||
# make two separate passes through the history since snatched and downloaded (action & quality)
|
||||
# may not always coordinate together
|
||||
|
||||
# update previous history so it shows the correct action
|
||||
logger.log(u'[3/4] Updating history to reflect the correct action...', logger.MESSAGE)
|
||||
|
@ -564,7 +636,9 @@ class AddIMDbInfo(db.SchemaUpgrade):
|
|||
db.backup_database('sickbeard.db', self.checkDBVersion())
|
||||
db_backed_up = True
|
||||
self.connection.action(
|
||||
'CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)')
|
||||
'CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC,'
|
||||
' akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT,'
|
||||
' rating TEXT, votes INTEGER, last_update NUMERIC)')
|
||||
|
||||
if not self.hasColumn('tv_shows', 'imdb_id'):
|
||||
logger.log(u'Adding IMDb column imdb_id to tv_shows')
|
||||
|
@ -680,9 +754,18 @@ class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade):
|
|||
|
||||
self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
|
||||
self.connection.action(
|
||||
'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)')
|
||||
'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT,'
|
||||
' location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC,'
|
||||
' airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC,'
|
||||
' lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT,'
|
||||
' last_update_indexer NUMERIC, dvdorder NUMERIC)')
|
||||
self.connection.action(
|
||||
'INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows')
|
||||
'INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs,'
|
||||
' status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id,'
|
||||
' last_update_indexer, dvdorder)'
|
||||
' SELECT show_id, tvdb_id, show_name, location, network, genre, runtime,'
|
||||
' quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list,'
|
||||
' imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows')
|
||||
self.connection.action('DROP TABLE tmp_tv_shows')
|
||||
|
||||
self.connection.action('CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);')
|
||||
|
@ -707,9 +790,16 @@ class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
|
|||
|
||||
self.connection.action('ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes')
|
||||
self.connection.action(
|
||||
'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)')
|
||||
'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC,'
|
||||
' indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC,'
|
||||
' hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT,'
|
||||
' subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)')
|
||||
self.connection.action(
|
||||
'INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes')
|
||||
'INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate,'
|
||||
' hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount,'
|
||||
' subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description,'
|
||||
' airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount,'
|
||||
' subtitles_lastsearch, is_proper FROM tmp_tv_episodes')
|
||||
self.connection.action('DROP TABLE tmp_tv_episodes')
|
||||
|
||||
self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);')
|
||||
|
@ -737,9 +827,13 @@ class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
|
|||
|
||||
self.connection.action('ALTER TABLE imdb_info RENAME TO tmp_imdb_info')
|
||||
self.connection.action(
|
||||
'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)')
|
||||
'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT,'
|
||||
' runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT,'
|
||||
' votes INTEGER, last_update NUMERIC)')
|
||||
self.connection.action(
|
||||
'INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info')
|
||||
'INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes,'
|
||||
' certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres,'
|
||||
' countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info')
|
||||
self.connection.action('DROP TABLE tmp_imdb_info')
|
||||
|
||||
self.incDBVersion()
|
||||
|
@ -761,7 +855,8 @@ class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
|
|||
self.connection.action(
|
||||
'CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)')
|
||||
self.connection.action(
|
||||
'INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info')
|
||||
'INSERT INTO info(last_backlog, last_indexer, last_proper_search)'
|
||||
' SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info')
|
||||
self.connection.action('DROP TABLE tmp_info')
|
||||
|
||||
self.incDBVersion()
|
||||
|
@ -791,7 +886,9 @@ class AddSceneNumbering(db.SchemaUpgrade):
|
|||
|
||||
logger.log(u'Upgrading table scene_numbering ...', logger.MESSAGE)
|
||||
self.connection.action(
|
||||
'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id,season,episode))')
|
||||
'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,'
|
||||
' scene_season INTEGER, scene_episode INTEGER,'
|
||||
' PRIMARY KEY (indexer_id,season,episode))')
|
||||
|
||||
self.incDBVersion()
|
||||
return self.checkDBVersion()
|
||||
|
@ -981,7 +1078,8 @@ class AddIndexerMapping(db.SchemaUpgrade):
|
|||
|
||||
logger.log(u'Adding table indexer_mapping')
|
||||
self.connection.action(
|
||||
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))')
|
||||
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC,'
|
||||
' PRIMARY KEY (indexer_id, indexer))')
|
||||
|
||||
self.incDBVersion()
|
||||
return self.checkDBVersion()
|
||||
|
@ -1190,7 +1288,9 @@ class ChangeMapIndexer(db.SchemaUpgrade):
|
|||
|
||||
logger.log(u'Changing table indexer_mapping')
|
||||
self.connection.action(
|
||||
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL, mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0, PRIMARY KEY (indexer_id, indexer, mindexer))')
|
||||
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL,'
|
||||
' mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0,'
|
||||
' PRIMARY KEY (indexer_id, indexer, mindexer))')
|
||||
|
||||
self.connection.action('CREATE INDEX IF NOT EXISTS idx_mapping ON indexer_mapping (indexer_id, indexer)')
|
||||
|
||||
|
@ -1204,21 +1304,23 @@ class ChangeMapIndexer(db.SchemaUpgrade):
|
|||
self.connection.action('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)')
|
||||
if self.hasTable('scene_exceptions'):
|
||||
self.connection.action('DROP TABLE scene_exceptions')
|
||||
self.connection.action('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT, season NUMERIC, custom NUMERIC)')
|
||||
self.connection.action('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY,'
|
||||
' indexer_id INTEGER KEY, show_name TEXT, season NUMERIC, custom NUMERIC)')
|
||||
|
||||
try:
|
||||
cachedb = db.DBConnection(filename='cache.db')
|
||||
if cachedb.hasTable('scene_exceptions'):
|
||||
sqlResults = cachedb.action('SELECT * FROM scene_exceptions')
|
||||
sql_results = cachedb.action('SELECT * FROM scene_exceptions')
|
||||
cs = []
|
||||
for r in sqlResults:
|
||||
cs.append(['INSERT OR REPLACE INTO scene_exceptions (exception_id, indexer_id, show_name, season, custom)'
|
||||
' VALUES (?,?,?,?,?)', [r['exception_id'], r['indexer_id'], r['show_name'],
|
||||
r['season'], r['custom']]])
|
||||
for r in sql_results:
|
||||
cs.append(
|
||||
['INSERT OR REPLACE INTO scene_exceptions (exception_id, indexer_id, show_name, season, custom)'
|
||||
' VALUES (?,?,?,?,?)',
|
||||
[r['exception_id'], r['indexer_id'], r['show_name'], r['season'], r['custom']]])
|
||||
|
||||
if len(cs) > 0:
|
||||
if 0 < len(cs):
|
||||
self.connection.mass_action(cs)
|
||||
except:
|
||||
except (StandardError, Exception):
|
||||
pass
|
||||
|
||||
keep_tables = {'scene_exceptions', 'scene_exceptions_refresh', 'info', 'indexer_mapping', 'blacklist',
|
||||
|
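The scene_exceptions copy above builds one parameterised INSERT OR REPLACE per source row into a list and hands the whole list to mass_action so it runs as a single batch. A standalone sketch of that batching shape, with plain sqlite3 standing in for SickGear's db wrapper:

import sqlite3

# Sketch of building a command list like `cs` above and running it as one batch.
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY,'
             ' indexer_id INTEGER, show_name TEXT, season NUMERIC, custom NUMERIC)')

rows = [(1, 101, 'Show A', -1, 0),
        (2, 102, 'Show B', 1, 1)]

cs = []
for r in rows:
    cs.append(['INSERT OR REPLACE INTO scene_exceptions'
               ' (exception_id, indexer_id, show_name, season, custom) VALUES (?,?,?,?,?)',
               list(r)])

if 0 < len(cs):
    with conn:  # one transaction for the whole batch
        for sql, args in cs:
            conn.execute(sql, args)

print(conn.execute('SELECT COUNT(*) FROM scene_exceptions').fetchone()[0])  # 2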
@ -1242,7 +1344,10 @@ class AddShowNotFoundCounter(db.SchemaUpgrade):
|
|||
logger.log(u'Adding table tv_shows_not_found')
|
||||
|
||||
db.backup_database('sickbeard.db', self.checkDBVersion())
|
||||
self.connection.action('CREATE TABLE tv_shows_not_found (indexer NUMERIC NOT NULL, indexer_id NUMERIC NOT NULL, fail_count NUMERIC NOT NULL DEFAULT 0, last_check NUMERIC NOT NULL, last_success NUMERIC, PRIMARY KEY (indexer_id, indexer))')
|
||||
self.connection.action(
|
||||
'CREATE TABLE tv_shows_not_found (indexer NUMERIC NOT NULL, indexer_id NUMERIC NOT NULL,'
|
||||
' fail_count NUMERIC NOT NULL DEFAULT 0, last_check NUMERIC NOT NULL, last_success NUMERIC,'
|
||||
' PRIMARY KEY (indexer_id, indexer))')
|
||||
|
||||
self.setDBVersion(20005)
|
||||
return self.checkDBVersion()
|
||||
|
@ -1255,7 +1360,7 @@ class AddFlagTable(db.SchemaUpgrade):
|
|||
logger.log(u'Adding table flags')
|
||||
|
||||
db.backup_database('sickbeard.db', self.checkDBVersion())
|
||||
self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL )')
|
||||
self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL)')
|
||||
|
||||
self.setDBVersion(20006)
|
||||
return self.checkDBVersion()
|
||||
|
@ -1275,7 +1380,29 @@ class DBIncreaseTo20007(db.SchemaUpgrade):
|
|||
class AddWebdlTypesTable(db.SchemaUpgrade):
|
||||
def execute(self):
|
||||
db.backup_database('sickbeard.db', self.checkDBVersion())
|
||||
self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL , regex TEXT NOT NULL )')
|
||||
self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL, regex TEXT NOT NULL)')
|
||||
|
||||
self.setDBVersion(20008)
|
||||
return self.checkDBVersion()
|
||||
|
||||
|
||||
# 20008 -> 20009
|
||||
class AddWatched(db.SchemaUpgrade):
|
||||
def execute(self):
|
||||
# remove old table from version 20007
|
||||
if self.hasTable('tv_episodes_watched') and not self.hasColumn('tv_episodes_watched', 'clientep_id'):
|
||||
self.connection.action('DROP TABLE tv_episodes_watched')
|
||||
self.connection.action('VACUUM')
|
||||
|
||||
if not self.hasTable('tv_episodes_watched'):
|
||||
logger.log(u'Adding table tv_episodes_watched')
|
||||
|
||||
db.backup_database('sickbeard.db', self.checkDBVersion())
|
||||
self.connection.action(
|
||||
'CREATE TABLE tv_episodes_watched (tvep_id NUMERIC NOT NULL, clientep_id TEXT, label TEXT,'
|
||||
' played NUMERIC DEFAULT 0 NOT NULL, date_watched NUMERIC NOT NULL, date_added NUMERIC,'
|
||||
' status NUMERIC, location TEXT, file_size NUMERIC, hide INT default 0 not null)'
|
||||
)
|
||||
|
||||
self.setDBVersion(20009)
|
||||
return self.checkDBVersion()
@ -47,26 +47,35 @@ def dbFilename(filename='sickbeard.db', suffix=None):
|
|||
return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)
|
||||
|
||||
|
||||
def mass_upsert_sql(tableName, valueDict, keyDict):
|
||||
|
||||
def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True):
|
||||
"""
|
||||
use with cl.extend(mass_upsert_sql(table_name, value_dict, key_dict))
|
||||
|
||||
:param tableName: table name
|
||||
:param valueDict: dict of values to be set {'table_fieldname': value}
|
||||
:param keyDict: dict of restrains for update {'table_fieldname': value}
|
||||
:param table_name: table name
|
||||
:param value_dict: dict of values to be set {'table_fieldname': value}
|
||||
:param key_dict: dict of constraints for the update {'table_fieldname': value}
|
||||
:param sanitise: True to remove k, v pairs in key_dict from value_dict as they must not exist in both.
|
||||
This option has a performance hit so it's best to remove key_dict keys from value_dict and set this False instead.
|
||||
:type sanitise: Boolean
|
||||
:return: list of 2 sql commands
|
||||
"""
|
||||
cl = []
|
||||
|
||||
genParams = lambda myDict: [x + ' = ?' for x in myDict.keys()]
|
||||
gen_params = (lambda my_dict: [x + ' = ?' for x in my_dict.keys()])
|
||||
|
||||
cl.append(['UPDATE [%s] SET %s WHERE %s' % (
|
||||
tableName, ', '.join(genParams(valueDict)), ' AND '.join(genParams(keyDict))), valueDict.values() + keyDict.values()])
|
||||
# sanity: remove k, v pairs in key_dict from value_dict
|
||||
if sanitise:
|
||||
value_dict = dict(filter(lambda (k, _): k not in key_dict.keys(), value_dict.items()))
|
||||
|
||||
cl.append(['UPDATE [%s] SET %s WHERE %s' %
|
||||
(table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))),
|
||||
value_dict.values() + key_dict.values()])
|
||||
|
||||
cl.append(['INSERT INTO [' + tableName + '] (' + ', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in valueDict.keys() + keyDict.keys()]) + ')' +
|
||||
' SELECT ' + ', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in valueDict.values() + keyDict.values()]) + ' WHERE changes() = 0'])
|
||||
cl.append(['INSERT INTO [' + table_name + '] (' +
|
||||
', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in value_dict.keys() + key_dict.keys()]) + ')' +
|
||||
' SELECT ' +
|
||||
', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in value_dict.values() + key_dict.values()]) +
|
||||
' WHERE changes() = 0'])
|
||||
return cl
|
||||
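For each call, mass_upsert_sql emits two statements: an UPDATE of the value columns constrained by the key columns, and an INSERT ... SELECT ... WHERE changes() = 0 that only fires when the UPDATE touched no rows; callers collect them with cl.extend(...) and run the list through mass_action. A standalone sketch of the same upsert shape against plain sqlite3, with illustrative table and column names:

import sqlite3

# Sketch of the upsert shape mass_upsert_sql generates: an UPDATE filtered by the key
# columns, then an INSERT that only runs when the UPDATE changed no rows (changes() = 0).
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes_watched (tvep_id NUMERIC, label TEXT, played NUMERIC)')

def upsert(conn, tvep_id, label, played):
    conn.execute('UPDATE tv_episodes_watched SET label = ?, played = ? WHERE tvep_id = ?',
                 [label, played, tvep_id])
    conn.execute('INSERT INTO tv_episodes_watched (tvep_id, label, played)'
                 ' SELECT ?, ?, ? WHERE changes() = 0', [tvep_id, label, played])

upsert(conn, 1001, 'kodi', 1)  # no matching row yet, so the INSERT fires
upsert(conn, 1001, 'kodi', 0)  # row exists, so only the UPDATE takes effect
print(conn.execute('SELECT * FROM tv_episodes_watched').fetchall())  # one row: (1001, 'kodi', 0)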
|
||||
|
||||
|
@ -261,12 +270,41 @@ class DBConnection(object):
|
|||
return False
|
||||
|
||||
def add_flag(self, flag_name):
|
||||
if not self.has_flag(flag_name):
|
||||
has_flag = self.has_flag(flag_name)
|
||||
if not has_flag:
|
||||
self.action('INSERT INTO flags (flag) VALUES (?)', [flag_name])
|
||||
return not has_flag
|
||||
|
||||
def remove_flag(self, flag_name):
|
||||
if self.has_flag(flag_name):
|
||||
has_flag = self.has_flag(flag_name)
|
||||
if has_flag:
|
||||
self.action('DELETE FROM flags WHERE flag = ?', [flag_name])
|
||||
return has_flag
|
||||
|
||||
def toggle_flag(self, flag_name):
|
||||
"""
|
||||
Add or remove a flag
|
||||
:param flag_name: Name of flag
|
||||
:type flag_name: String
|
||||
:return: True if this call added the flag, False if flag is removed
|
||||
:rtype: Boolean
|
||||
"""
|
||||
if self.remove_flag(flag_name):
|
||||
return False
|
||||
self.add_flag(flag_name)
|
||||
return True
|
||||
|
||||
def set_flag(self, flag_name, state=True):
|
||||
"""
|
||||
Set state of flag
|
||||
:param flag_name: Name of flag
|
||||
:type flag_name: String
|
||||
:param state: If true, create flag otherwise remove flag
|
||||
:type state: Boolean
|
||||
:return: Previous state of flag
|
||||
:rtype: Boolean
|
||||
"""
|
||||
return (self.add_flag, self.remove_flag)[not bool(state)](flag_name)
|
||||
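The reworked flag helpers give every operation a meaningful return value: add_flag and remove_flag report whether this call changed anything, toggle_flag returns True only when it added the flag, and set_flag dispatches to add or remove based on state. A standalone sketch of those semantics, with a plain set standing in for the single-column flags table:

# Sketch only; a set models the flags table instead of the real DBConnection.
class Flags(object):
    def __init__(self):
        self._flags = set()

    def has_flag(self, name):
        return name in self._flags

    def add_flag(self, name):
        has_flag = self.has_flag(name)
        if not has_flag:
            self._flags.add(name)
        return not has_flag  # True if this call added the flag

    def remove_flag(self, name):
        has_flag = self.has_flag(name)
        if has_flag:
            self._flags.discard(name)
        return has_flag  # True if this call removed the flag

    def toggle_flag(self, name):
        if self.remove_flag(name):
            return False
        self.add_flag(name)
        return True  # True only when this call added the flag

    def set_flag(self, name, state=True):
        # create or remove the flag depending on state
        return (self.add_flag, self.remove_flag)[not bool(state)](name)

f = Flags()
print(f.add_flag('example'))     # True, flag was absent
print(f.add_flag('example'))     # False, already set
print(f.toggle_flag('example'))  # False, this call removed it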
|
||||
def close(self):
|
||||
"""Close database connection"""
|
||||
|
@ -515,6 +553,7 @@ def MigrationCode(myDB):
|
|||
20005: sickbeard.mainDB.AddFlagTable,
|
||||
20006: sickbeard.mainDB.DBIncreaseTo20007,
|
||||
20007: sickbeard.mainDB.AddWebdlTypesTable,
|
||||
20008: sickbeard.mainDB.AddWatched,
|
||||
# 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
|
||||
}
|
||||
|
||||
|
@ -532,6 +571,9 @@ def MigrationCode(myDB):
|
|||
else:
|
||||
|
||||
while db_version < sickbeard.mainDB.MAX_DB_VERSION:
|
||||
if None is schema[db_version]: # skip placeholders used when multi PRs are updating DB
|
||||
db_version += 1
|
||||
continue
|
||||
try:
|
||||
update = schema[db_version](myDB)
|
||||
db_version = update.execute()
|
||||
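MigrationCode walks the schema dict one version at a time, skips None placeholders reserved for in-flight pull requests, and lets each upgrade class report the version it reached. A minimal sketch of that dispatch shape with dummy upgrade classes; the real classes live in sickbeard.mainDB:

# Dummy stand-ins for the upgrade classes; each execute() returns the version it reached.
class AddWebdlTypesTable(object):
    def __init__(self, my_db):
        self.my_db = my_db

    def execute(self):
        return 20008


class AddWatched(object):
    def __init__(self, my_db):
        self.my_db = my_db

    def execute(self):
        return 20009


MAX_DB_VERSION = 20009
schema = {20007: AddWebdlTypesTable, 20008: AddWatched}  # a None value marks a reserved placeholder

db_version = 20007
my_db = object()  # stands in for the DBConnection handed to each upgrade
while db_version < MAX_DB_VERSION:
    if None is schema[db_version]:  # skip placeholders used when multiple PRs update the DB
        db_version += 1
        continue
    db_version = schema[db_version](my_db).execute()

print(db_version)  # 20009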
@ -122,7 +122,7 @@ class EmbyNotifier(Notifier):
|
|||
else:
|
||||
new_keys += [key]
|
||||
|
||||
apikeys = (new_keys, [x.strip() for x in sickbeard.EMBY_APIKEY.split(',') if x.strip()] + new_keys)[has_old_key]
|
||||
apikeys = has_old_key and [x.strip() for x in sickbeard.EMBY_APIKEY.split(',') if x.strip()] or [] + new_keys
|
||||
|
||||
if len(hosts) != len(apikeys):
|
||||
message = ('Not enough Api keys for hosts', 'More Api keys than hosts')[len(apikeys) > len(hosts)]
|
||||
|
@ -178,5 +178,10 @@ class EmbyNotifier(Notifier):
|
|||
def discover_server(self):
|
||||
return self._discover_server()
|
||||
|
||||
def check_config(self, hosts=None, apikeys=None):
|
||||
|
||||
self._testing = True # ensure _choose() uses passed args
|
||||
return self._check_config(hosts, apikeys)
|
||||
|
||||
|
||||
notifier = EmbyNotifier
|
||||
@ -100,7 +100,8 @@ class ProviderFailList(object):
|
|||
fail_hour = e.fail_time.time().hour
|
||||
date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour))
|
||||
if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}):
|
||||
default = {'date': str(fail_date), 'date_time': date_time, 'multirow': False}
|
||||
default = {'date': str(fail_date), 'date_time': date_time,
|
||||
'timestamp': helpers.tryInt(sbdatetime.totimestamp(e.fail_time)), 'multirow': False}
|
||||
for et in ProviderFailTypes.names.itervalues():
|
||||
default[et] = b_d.copy()
|
||||
fail_dict.setdefault(date_time, default)[ProviderFailTypes.names[e.fail_type]]['count'] = 1
|
||||
|
@ -502,8 +503,10 @@ class GenericProvider(object):
|
|||
|
||||
kwargs['raise_exceptions'] = True
|
||||
kwargs['raise_status_code'] = True
|
||||
for k, v in dict(headers=self.headers, hooks=dict(response=self.cb_response), session=self.session).items():
|
||||
for k, v in dict(headers=self.headers, hooks=dict(response=self.cb_response)).items():
|
||||
kwargs.setdefault(k, v)
|
||||
if 'nzbs.in' not in url: # this provider returns 503's 3 out of 4 requests with the persistent session system
|
||||
kwargs.setdefault('session', self.session)
|
||||
|
||||
post_data = kwargs.get('post_data')
|
||||
post_json = kwargs.get('post_json')
|
||||
@ -55,14 +55,13 @@ class Scheduler(threading.Thread):
|
|||
self._stop.set()
|
||||
self.unpause()
|
||||
|
||||
def check_paused(self):
|
||||
if hasattr(self.action, 'check_paused'):
|
||||
if self.action.check_paused():
|
||||
def set_paused_state(self):
|
||||
if hasattr(self.action, 'is_enabled'):
|
||||
self.silent = not self.action.is_enabled()
|
||||
if self.silent:
|
||||
self.pause()
|
||||
self.silent = True
|
||||
else:
|
||||
self.unpause()
|
||||
self.silent = False
|
||||
|
||||
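The scheduler no longer polls a check_paused() hook; it reads the action's is_enabled() and derives both the paused and silent states from it in set_paused_state(). A standalone sketch of that contract with a toy action and scheduler, where threading.Event mirrors the _unpause event used above:

import threading

class ToyAction(object):
    """A scheduled action only needs to expose is_enabled()."""
    enabled = True

    def is_enabled(self):
        return self.enabled

class ToyScheduler(object):
    def __init__(self, action):
        self.action = action
        self.silent = False
        self._unpause = threading.Event()
        self.unpause()

    def pause(self):
        self._unpause.clear()

    def unpause(self):
        self._unpause.set()

    def set_paused_state(self):
        # mirrors Scheduler.set_paused_state: silent and paused both follow is_enabled()
        if hasattr(self.action, 'is_enabled'):
            self.silent = not self.action.is_enabled()
            if self.silent:
                self.pause()
            else:
                self.unpause()

sched = ToyScheduler(ToyAction())
sched.action.enabled = False
sched.set_paused_state()
print(sched.silent, sched._unpause.is_set())  # True False: the loop now waits and stays silent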
def timeLeft(self):
|
||||
return self.cycleTime - (datetime.datetime.now() - self.lastRun)
|
||||
|
@ -74,11 +73,12 @@ class Scheduler(threading.Thread):
|
|||
return False
|
||||
|
||||
def run(self):
|
||||
self.check_paused()
|
||||
self.set_paused_state()
|
||||
|
||||
# if self._unpause Event() is NOT set the loop pauses
|
||||
while self._unpause.wait() and not self._stop.is_set():
|
||||
|
||||
if getattr(self.action, 'is_enabled', True):
|
||||
try:
|
||||
current_time = datetime.datetime.now()
|
||||
should_run = False
|
||||
|
@ -99,7 +99,8 @@ class Scheduler(threading.Thread):
|
|||
if self.force:
|
||||
should_run = True
|
||||
|
||||
if should_run and self.prevent_cycle_run is not None and self.prevent_cycle_run():
|
||||
if should_run and ((self.prevent_cycle_run is not None and self.prevent_cycle_run()) or
|
||||
getattr(self.action, 'prevent_run', False)):
|
||||
logger.log(u'%s skipping this cycleTime' % self.name, logger.WARNING)
|
||||
# set lastRun to only check start_time after another cycleTime
|
||||
self.lastRun = current_time
|
||||
|
@ -120,6 +121,9 @@ class Scheduler(threading.Thread):
|
|||
finally:
|
||||
if self.force:
|
||||
self.force = False
|
||||
else:
|
||||
# disabled schedulers will only be rechecked every 30 seconds until enabled
|
||||
time.sleep(30)
|
||||
|
||||
time.sleep(1)
|
||||
|
@ -28,10 +28,8 @@ class ProperSearcher:
|
|||
self.amActive = False
|
||||
|
||||
@staticmethod
|
||||
def check_paused():
|
||||
if sickbeard.DOWNLOAD_PROPERS:
|
||||
return False
|
||||
return True
|
||||
def is_enabled():
|
||||
return sickbeard.DOWNLOAD_PROPERS
|
||||
|
||||
def run(self):
|
||||
|
||||
@ -81,18 +81,26 @@ def subtitlesLanguages(video_path):
|
|||
def subtitleLanguageFilter():
|
||||
return [language for language in subliminal.language.LANGUAGES if language[2] != ""]
|
||||
|
||||
class SubtitlesFinder():
|
||||
|
||||
class SubtitlesFinder:
|
||||
"""
|
||||
The SubtitlesFinder will be executed every hour but will not necessarily search
|
||||
and download subtitles; it only does so if the defined rule is true
|
||||
"""
|
||||
@staticmethod
|
||||
def check_paused():
|
||||
if sickbeard.USE_SUBTITLES:
|
||||
return False
|
||||
return True
|
||||
def __init__(self):
|
||||
self.amActive = False
|
||||
|
||||
def run(self, force=False):
|
||||
@staticmethod
|
||||
def is_enabled():
|
||||
return sickbeard.USE_SUBTITLES
|
||||
|
||||
def run(self):
|
||||
if self.is_enabled():
|
||||
self.amActive = True
|
||||
self._main()
|
||||
self.amActive = False
|
||||
|
||||
def _main(self):
|
||||
if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
|
||||
logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR)
|
||||
return
|
||||
60
sickbeard/watchedstate.py
Normal file
@ -0,0 +1,60 @@
#
|
||||
# This file is part of SickGear.
|
||||
#
|
||||
# SickGear is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# SickGear is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import threading
|
||||
|
||||
import sickbeard
|
||||
from sickbeard import watchedstate_queue
|
||||
|
||||
|
||||
class WatchedStateUpdater(object):
|
||||
def __init__(self, name, queue_item):
|
||||
|
||||
self.amActive = False
|
||||
self.lock = threading.Lock()
|
||||
self.name = name
|
||||
self.queue_item = queue_item
|
||||
|
||||
@property
|
||||
def prevent_run(self):
|
||||
return sickbeard.watchedStateQueueScheduler.action.is_in_queue(self.queue_item)
|
||||
|
||||
def run(self):
|
||||
if self.is_enabled():
|
||||
self.amActive = True
|
||||
new_item = self.queue_item()
|
||||
sickbeard.watchedStateQueueScheduler.action.add_item(new_item)
|
||||
self.amActive = False
|
||||
|
||||
|
||||
class EmbyWatchedStateUpdater(WatchedStateUpdater):
|
||||
|
||||
def __init__(self):
|
||||
super(EmbyWatchedStateUpdater, self).__init__('Emby', watchedstate_queue.EmbyWatchedStateQueueItem)
|
||||
|
||||
@staticmethod
|
||||
def is_enabled():
|
||||
return sickbeard.USE_EMBY and sickbeard.EMBY_WATCHEDSTATE_SCHEDULED
|
||||
|
||||
|
||||
class PlexWatchedStateUpdater(WatchedStateUpdater):
|
||||
|
||||
def __init__(self):
|
||||
super(PlexWatchedStateUpdater, self).__init__('Plex', watchedstate_queue.PlexWatchedStateQueueItem)
|
||||
|
||||
@staticmethod
|
||||
def is_enabled():
|
||||
return sickbeard.USE_PLEX and sickbeard.PLEX_WATCHEDSTATE_SCHEDULED
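Each watched-state updater is a thin scheduler action: is_enabled() gates the run, prevent_run defers a cycle while a matching item is already queued, and run() just drops a new queue item onto the watched-state queue. The wiring of sickbeard.watchedStateQueueScheduler happens outside this diff, so the sketch below uses stand-in classes to show how the pieces interact:

# Stand-in classes only; the real queue and scheduler are created during SickGear start-up,
# which is outside this diff.
class FakeQueue(object):
    def __init__(self):
        self.items = []

    def is_in_queue(self, itemtype):
        return any(isinstance(i, itemtype) for i in self.items)

    def add_item(self, item):
        self.items.append(item)

class EmbyItem(object):
    """Plays the role of EmbyWatchedStateQueueItem."""

class Updater(object):
    # mirrors WatchedStateUpdater: queue_item holds the item *class*, instantiated per run
    def __init__(self, queue, queue_item):
        self.queue, self.queue_item = queue, queue_item

    @property
    def prevent_run(self):
        return self.queue.is_in_queue(self.queue_item)

    def is_enabled(self):
        return True  # stands in for USE_EMBY and EMBY_WATCHEDSTATE_SCHEDULED

    def run(self):
        if self.is_enabled():
            self.queue.add_item(self.queue_item())

queue = FakeQueue()
updater = Updater(queue, EmbyItem)
updater.run()
print(updater.prevent_run)  # True: an EmbyItem is queued, so the scheduler skips the next cycle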
83
sickbeard/watchedstate_queue.py
Normal file
@ -0,0 +1,83 @@
#
|
||||
# This file is part of SickGear.
|
||||
#
|
||||
# SickGear is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# SickGear is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import with_statement
|
||||
|
||||
from sickbeard import generic_queue, logger
|
||||
from sickbeard.webserve import History
|
||||
|
||||
EMBYWATCHEDSTATE = 10
|
||||
PLEXWATCHEDSTATE = 20
|
||||
|
||||
|
||||
class WatchedStateQueue(generic_queue.GenericQueue):
|
||||
def __init__(self):
|
||||
super(WatchedStateQueue, self).__init__()
|
||||
# self.queue_name = 'WATCHEDSTATEQUEUE'
|
||||
self.queue_name = 'Q'
|
||||
|
||||
def is_in_queue(self, itemtype):
|
||||
with self.lock:
|
||||
for cur_item in self.queue + [self.currentItem]:
|
||||
if isinstance(cur_item, itemtype):
|
||||
return True
|
||||
return False
|
||||
|
||||
# method for possible UI usage, can be removed if not used
|
||||
def queue_length(self):
|
||||
length = {'emby': 0, 'plex': 0}
|
||||
with self.lock:
|
||||
for cur_item in [self.currentItem] + self.queue:
|
||||
if isinstance(cur_item, EmbyWatchedStateQueueItem):
|
||||
length['emby'] += 1
|
||||
elif isinstance(cur_item, PlexWatchedStateQueueItem):
|
||||
length['plex'] += 1
|
||||
|
||||
return length
|
||||
|
||||
def add_item(self, item):
|
||||
if isinstance(item, EmbyWatchedStateQueueItem) and not self.is_in_queue(EmbyWatchedStateQueueItem):
|
||||
# emby watched state item
|
||||
generic_queue.GenericQueue.add_item(self, item)
|
||||
elif isinstance(item, PlexWatchedStateQueueItem) and not self.is_in_queue(PlexWatchedStateQueueItem):
|
||||
# plex watched state item
|
||||
generic_queue.GenericQueue.add_item(self, item)
|
||||
else:
|
||||
logger.log(u'Not adding item, it\'s already in the queue', logger.DEBUG)
|
||||
|
||||
|
||||
class EmbyWatchedStateQueueItem(generic_queue.QueueItem):
|
||||
def __init__(self):
|
||||
super(EmbyWatchedStateQueueItem, self).__init__('Emby Watched', EMBYWATCHEDSTATE)
|
||||
|
||||
def run(self):
|
||||
super(EmbyWatchedStateQueueItem, self).run()
|
||||
try:
|
||||
History.update_watched_state_emby()
|
||||
finally:
|
||||
self.finish()
|
||||
|
||||
|
||||
class PlexWatchedStateQueueItem(generic_queue.QueueItem):
|
||||
def __init__(self):
|
||||
super(PlexWatchedStateQueueItem, self).__init__('Plex Watched', PLEXWATCHEDSTATE)
|
||||
|
||||
def run(self):
|
||||
super(PlexWatchedStateQueueItem, self).run()
|
||||
try:
|
||||
History.update_watched_state_plex()
|
||||
finally:
|
||||
self.finish()
|
|
@ -105,13 +105,26 @@ class Api(webserve.BaseHandler):
|
|||
self.set_header('X-Application', 'SickGear')
|
||||
self.set_header('X-API-Version', Api.version)
|
||||
|
||||
def prepare(self):
|
||||
# Incorporate request JSON into arguments dictionary.
|
||||
if self.request.body:
|
||||
try:
|
||||
json_data = {'payloadjson': json.loads(self.request.body)}
|
||||
self.request.arguments.update(json_data)
|
||||
except (StandardError, Exception):
|
||||
raise ApiError('Unable to parse JSON.')
|
||||
super(Api, self).prepare()
|
||||
|
||||
def post(self, route, *args, **kwargs):
|
||||
return self.get(route, *args, **kwargs)
|
||||
|
||||
@gen.coroutine
|
||||
def get(self, route, *args, **kwargs):
|
||||
route = route.strip('/') or 'index'
|
||||
|
||||
kwargs = self.request.arguments
|
||||
for arg, value in kwargs.items():
|
||||
if len(value) == 1:
|
||||
if not isinstance(value, dict) and len(value) == 1:
|
||||
kwargs[arg] = value[0]
|
||||
|
||||
args = args[1:]
|
||||
|
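The extra isinstance guard in get() matters because prepare() injects the request body as a plain dict under 'payloadjson', while every other Tornado argument arrives as a list that should be unwrapped when it holds a single value. A small sketch of the difference; the payload contents are illustrative only:

import json

# Normal arguments arrive as lists; prepare() merges the JSON body in as a dict,
# and the isinstance check keeps that dict intact while single-value lists are unwrapped.
body = '{"0": {"path": "/media/Show/S01E01.mkv", "played": 100}}'  # illustrative payload
kwargs = {'cmd': ['sg.updatewatchedstate'],
          'payloadjson': json.loads(body)}

for arg, value in kwargs.items():
    if not isinstance(value, dict) and len(value) == 1:
        kwargs[arg] = value[0]

print(kwargs['cmd'])                # 'sg.updatewatchedstate' (unwrapped from its list)
print(type(kwargs['payloadjson']))  # dict, left intact for the command to consume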
@ -475,6 +488,11 @@ class ApiCall(object):
|
|||
value = li
|
||||
else:
|
||||
value = value.split("|")
|
||||
elif type == "dict":
|
||||
if isinstance(value, dict):
|
||||
value = value
|
||||
else:
|
||||
error = True
|
||||
elif type == "string":
|
||||
pass
|
||||
elif type == "ignore":
|
||||
|
@ -2857,6 +2875,33 @@ class CMD_SickGearSetRequrieWords(ApiCall):
|
|||
return _responds(RESULT_SUCCESS, data=return_data, msg="%s set requried words" % return_type)
|
||||
|
||||
|
||||
class CMD_SickGearUpdateWatchedState(ApiCall):
|
||||
_help = {"desc": "Update db with details of media file that is watched or unwatched",
|
||||
"requiredParameters": {
|
||||
"payloadjson": {
|
||||
"desc": "Payload is a dict of dicts transmitted as JSON via POST request"},
|
||||
}}
|
||||
|
||||
def __init__(self, handler, args, kwargs):
|
||||
# required
|
||||
self.payloadjson, args = self.check_params(args, kwargs, "payloadjson", None, True, "dict", [])
|
||||
# optional
|
||||
# super, missing, help
|
||||
ApiCall.__init__(self, handler, args, kwargs)
|
||||
|
||||
def run(self):
|
||||
""" Update db with details of media file that is watched or unwatched """
|
||||
payload = self.payloadjson.copy()
|
||||
|
||||
from webserve import MainHandler
|
||||
MainHandler.update_watched_state(payload, as_json=False)
|
||||
|
||||
if not payload:
|
||||
return _responds(RESULT_FAILURE, msg='Request made to SickGear with invalid payload')
|
||||
|
||||
return _responds(RESULT_SUCCESS, payload)
|
||||
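A hedged client-side sketch of calling the new command: the URL follows the usual /api/&lt;apikey&gt;/?cmd=... convention, and the payload is the dict of dicts the help text describes, posted as JSON so prepare() exposes it to the command as payloadjson. The field names inside the payload are assumptions for illustration; the authoritative instructions are linked from the Watched layout at /history.

import json
try:
    from urllib.request import Request, urlopen   # Python 3
except ImportError:
    from urllib2 import Request, urlopen          # Python 2

# Illustrative payload only: a dict of dicts, as the help text describes. The field names
# inside each entry are assumptions, not the documented schema.
payload = {'0': {'path': '/media/Show/S01E01.mkv', 'played': 100, 'label': 'kodi'}}

# Assumed local SickGear instance and API key placeholder.
url = 'http://localhost:8081/api/API-KEY/?cmd=sg.updatewatchedstate'
req = Request(url, json.dumps(payload).encode('utf-8'),
              {'Content-Type': 'application/json'})
print(json.load(urlopen(req)))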
|
||||
|
||||
class CMD_SickGearShow(ApiCall):
|
||||
_help = {"desc": "display information for a given show",
|
||||
"requiredParameters": {"indexer": {"desc": "indexer of a show"},
|
||||
|
@ -4305,12 +4350,12 @@ class CMD_SickGearShowsStats(ApiCall):
|
|||
|
||||
return _responds(RESULT_SUCCESS, stats)
|
||||
|
||||
|
||||
# WARNING: never define a cmd call string that contains a "_" (underscore)
|
||||
class CMD_ShowsStats(CMD_SickGearShowsStats):
|
||||
_help = {"desc": "display the global thetvdb.com shows and episode stats",
|
||||
"SickGearCommand": "sg.shows.stats",
|
||||
}
|
||||
|
||||
# this is reserved for cmd indexes used while cmd chaining
|
||||
def __init__(self, handler, args, kwargs):
|
||||
# required
|
||||
# optional
|
||||
|
@ -4318,8 +4363,8 @@ class CMD_ShowsStats(CMD_SickGearShowsStats):
|
|||
self.sickbeard_call = True
|
||||
CMD_SickGearShowsStats.__init__(self, handler, args, kwargs)
|
||||
|
||||
|
||||
# WARNING: never define a cmd call string that contains a "_" (underscore)
|
||||
# WARNING: never define a param name that contains a "." (dot)
|
||||
# this is reserved for cmd namspaces used while cmd chaining
|
||||
# this is reserved for cmd indexes used while cmd chaining
|
||||
|
||||
# WARNING: never define a param name that contains a "." (dot)
|
||||
|
@ -4389,6 +4434,7 @@ _functionMaper = {"help": CMD_Help,
|
|||
"sg.setignorewords": CMD_SickGearSetIgnoreWords,
|
||||
"sg.listrequiredwords": CMD_SickGearListRequireWords,
|
||||
"sg.setrequiredwords": CMD_SickGearSetRequrieWords,
|
||||
"sg.updatewatchedstate": CMD_SickGearUpdateWatchedState,
|
||||
"show": CMD_Show,
|
||||
"sg.show": CMD_SickGearShow,
|
||||
"show.addexisting": CMD_ShowAddExisting,
|
||||
@ -93,6 +93,10 @@ class WebServer(threading.Thread):
|
|||
# javascript
|
||||
(r'%s/js/(.*)' % self.options['web_root'], webserve.BaseStaticFileHandler,
|
||||
{'path': os.path.join(self.options['data_root'], 'js')}),
|
||||
|
||||
(r'%s/kodi/(.*)' % self.options['web_root'], webserve.RepoHandler,
|
||||
{'path': os.path.join(sickbeard.CACHE_DIR, 'clients', 'kodi'),
|
||||
'default_filename': 'index.html'}),
|
||||
])
|
||||
|
||||
# Main Handler
|
||||