Merge pull request #1005 from JackDandy/feature/ChangeProperSearchPrio

Change overhaul handling of PROPERS/REPACKS/REAL.
This commit is contained in:
JackDandy 2017-10-30 22:31:02 +00:00 committed by GitHub
commit 3f4409cf3c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
22 changed files with 562 additions and 185 deletions

View file

@ -130,6 +130,10 @@
* Change subtitle addons so they no longer need to be saved before Search Subtitles is enabled,
because the forbidden action of reusing an exited FindSubtitles thread is no longer attempted
* Fix tools menu not opening for some browsers
* Change overhaul handling of PROPERS/REPACKS/REAL
* Add restriction to allow only same release group for repacks
* Change try all episode names with 'real', 'repack', 'proper'
* Add tip to search settings/media search about improved matching with optional regex library
[develop changelog]

View file

@ -438,6 +438,13 @@ class SickGear(object):
startup_background_tasks = threading.Thread(name='FETCH-XEMDATA', target=sickbeard.scene_exceptions.get_xem_ids)
startup_background_tasks.start()
# check history snatched_proper update
if not db.DBConnection().has_flag('history_snatch_proper'):
# noinspection PyUnresolvedReferences
history_snatched_proper_task = threading.Thread(name='UPGRADE-HISTORY-ACTION',
target=sickbeard.history.history_snatched_proper_fix)
history_snatched_proper_task.start()
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.remove_old_history()

View file

@ -47,7 +47,7 @@
</div>
<fieldset class="component-group-list">
<div class="field-pair">
<div class="field-pair" style="padding-bottom:0">
<label for="download_propers">
<span class="component-title">Download propers</span>
<span class="component-desc">
@ -57,6 +57,10 @@
</label>
</div>
<div id="content_download_propers">
<span class="component-desc">
<p>Optional: to improve matching, install the OS dependent <a href="https://pypi.python.org/pypi/regex" target="_blank">regex python library</a></p>
<p>at a command line, simply enter ... <code>python -m pip install regex</code></p>
</span>
<div class="field-pair">
<label>
<span class="component-title">Limit WebDL propers</span>

View file

@ -1,10 +1,9 @@
#import sickbeard
#import datetime
#import re
#from sickbeard.common import *
#from sickbeard import sbdatetime
#from sickbeard import history
#from sickbeard import providers
#import sickbeard
#from sickbeard import history, providers, sbdatetime
#from sickbeard.common import Quality, statusStrings, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, SUBTITLED, ARCHIVED, FAILED
#from sickbeard.providers import generic
<% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
@ -130,21 +129,21 @@
<tr>
#set $curdatetime = $datetime.datetime.strptime(str($hItem['date']), $history.dateFormat)
<td><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div><span class="sort-data">$time.mktime($curdatetime.timetuple())</span></td>
<td class="tvShow"><a href="$sbRoot/home/displayShow?show=$hItem['showid']#season-$hItem['season']">$display_name#if 'proper' in $hItem['resource'].lower or 'repack' in $hItem['resource'].lower then ' <span class="quality Proper">Proper</span>' else ''#</a></td>
<td#echo ('', ' class="subtitles_column"')[SUBTITLED == $curStatus]#>
#if SUBTITLED == $curStatus
<td class="tvShow"><a href="$sbRoot/home/displayShow?show=$hItem['showid']#season-$hItem['season']">$display_name#if $Quality.splitCompositeStatus($hItem['action'])[0] == $SNATCHED_PROPER then ' <span class="quality Proper">Proper</span>' else ''#</a></td>
<td#echo ('', ' class="subtitles_column"')[$SUBTITLED == $curStatus]#>
#if $SUBTITLED == $curStatus
<img width="16" height="11" src="$sbRoot/images/flags/<%= hItem["resource"][len(hItem["resource"])-6:len(hItem["resource"])-4] + '.png' %>">
#end if
<span class="help" title="$os.path.basename($hItem["resource"])">$statusStrings[$curStatus]</span>
</td>
<td class="provider">
#if DOWNLOADED == $curStatus
#if $DOWNLOADED == $curStatus
#if '-1' != $hItem['provider']
<span><i>$hItem['provider']</i></span>
#end if
#else
#if 0 < $hItem['provider']
#if $curStatus in [SNATCHED, FAILED]
#if $curStatus in [$SNATCHED, $SNATCHED_PROPER, $SNATCHED_BEST, $FAILED]
#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($hItem['provider']))
#if None is not $provider
<img src="$sbRoot/images/providers/<%= provider.image_name() %>" width="16" height="16" /><span>$provider.name</span>
@ -196,20 +195,20 @@
#for $action in reversed($hItem['actions'])
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
#set $basename = $os.path.basename($action['resource'])
#if $curStatus in [SNATCHED, FAILED]
#if $curStatus in [$SNATCHED, $SNATCHED_PROPER, $SNATCHED_BEST, $FAILED]
#set $provider = $providers.getProviderClass($generic.GenericProvider.make_id($action['provider']))
#if None is not $provider
#set $prov_list += ['<span%s><img class="help" src="%s/images/providers/%s" width="16" height="16" alt="%s" title="%s.. %s: %s" /></span>'\
% (('', ' class="fail"')[FAILED == $curStatus], $sbRoot, $provider.image_name(), $provider.name,
('%s%s' % ($order, 'th' if $order in [11, 12, 13] or str($order)[-1] not in $ordinal_indicators else $ordinal_indicators[str($order)[-1]]), 'Snatch failed')[FAILED == $curStatus],
% (('', ' class="fail"')[$FAILED == $curStatus], $sbRoot, $provider.image_name(), $provider.name,
('%s%s' % ($order, 'th' if $order in [11, 12, 13] or str($order)[-1] not in $ordinal_indicators else $ordinal_indicators[str($order)[-1]]), 'Snatch failed')[$FAILED == $curStatus],
$provider.name, $basename)]
#set $order += (0, 1)[SNATCHED == $curStatus]
#set $order += (0, 1)[$curStatus in ($SNATCHED, $SNATCHED_PROPER, $SNATCHED_BEST)]
#else
#set $prov_list += ['<img src="%s/images/providers/missing.png" width="16" height="16" alt="missing provider" title="missing provider" />'\
% $sbRoot]
#end if
#end if
#if $curStatus in [DOWNLOADED, ARCHIVED]
#if $curStatus in [$DOWNLOADED, $ARCHIVED]
#set $match = $re.search('\-(\w+)\.\w{3}\Z', $basename)
#set $non_scene_note = ''
#if not $match
@ -250,7 +249,7 @@
<td>
#for $action in reversed($hItem['actions'])
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($action['action']))
#if SUBTITLED == $curStatus
#if $SUBTITLED == $curStatus
<img src="$sbRoot/images/subtitles/<%= action['provider'] + '.png' %>" width="16" height="16" alt="$action['provider']" title="<%= action['provider'].capitalize() %>:$os.path.basename($action['resource'])" />
<span> / </span>
<img width="16" height="11" src="$sbRoot/images/flags/<%= action['resource'][len(action['resource'])-6:len(action['resource'])-4] + '.png' %>" style="vertical-align:middle !important">

View file

@ -28,7 +28,7 @@ except ImportError:
from requests.compat import OrderedDict
class SearchResult:
class SearchResult(object):
"""
Represents a search result from an indexer.
"""
@ -66,6 +66,24 @@ class SearchResult:
# version
self.version = -1
# proper level
self._properlevel = 0
# is a repack
self.is_repack = False
# provider unique id
self.puid = None
@property
def properlevel(self):
return self._properlevel
@properlevel.setter
def properlevel(self, v):
if isinstance(v, (int, long)):
self._properlevel = v
def __str__(self):
if self.provider is None:
@ -248,10 +266,12 @@ class ShowListUI(ShowFilter):
class Proper:
def __init__(self, name, url, date, show, parsed_show=None):
def __init__(self, name, url, date, show, parsed_show=None, size=-1, puid=None):
self.name = name
self.url = url
self.date = date
self.size = size
self.puid = puid
self.provider = None
self.quality = Quality.UNKNOWN
self.release_group = None

View file

@ -120,6 +120,31 @@ class Quality:
FAILED: 'Failed',
SNATCHED_BEST: 'Snatched (Best)'}
real_check = r'\breal\b\W?(?=proper|repack|e?ac3|aac|dts|read\Wnfo|(ws\W)?[ph]dtv|(ws\W)?dsr|web|dvd|blu|\d{2,3}0(p|i))(?!.*\d+(e|x)\d+)'
proper_levels = [(re.compile(r'\brepack\b(?!.*\d+(e|x)\d+)', flags=re.I), True),
(re.compile(r'\bproper\b(?!.*\d+(e|x)\d+)', flags=re.I), False),
(re.compile(real_check, flags=re.I), False)]
@staticmethod
def get_proper_level(extra_no_name, version, is_anime=False, check_is_repack=False):
level = 0
is_repack = False
if is_anime:
if isinstance(version, (int, long)):
level = version
else:
level = 1
elif isinstance(extra_no_name, basestring):
for p, r_check in Quality.proper_levels:
a = len(p.findall(extra_no_name))
level += a
if 0 < a and r_check:
is_repack = True
if check_is_repack:
return is_repack, level
return level
@staticmethod
def get_quality_css(quality):
return (Quality.qualityStrings[quality].replace('2160p', 'UHD2160p').replace('1080p', 'HD1080p')

View file

@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
MIN_DB_VERSION = 9 # oldest db version we support migrating from
MAX_DB_VERSION = 20005
MAX_DB_VERSION = 20006
class MainSanityCheck(db.DBSanityCheck):
@ -116,28 +116,32 @@ class MainSanityCheck(db.DBSanityCheck):
def fix_missing_table_indexes(self):
if not self.connection.select('PRAGMA index_info("idx_indexer_id")'):
logger.log(u'Missing idx_indexer_id for TV Shows table detected!, fixing...')
self.connection.action('CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);')
logger.log('Updating TV Shows table with index idx_indexer_id')
self.connection.action('CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id)')
if not self.connection.select('PRAGMA index_info("idx_tv_episodes_showid_airdate")'):
logger.log(u'Missing idx_tv_episodes_showid_airdate for TV Episodes table detected!, fixing...')
self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);')
logger.log('Updating TV Episode table with index idx_tv_episodes_showid_airdate')
self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate)')
if not self.connection.select('PRAGMA index_info("idx_showid")'):
logger.log(u'Missing idx_showid for TV Episodes table detected!, fixing...')
self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);')
logger.log('Updating TV Episode table with index idx_showid')
self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid)')
if not self.connection.select('PRAGMA index_info("idx_status")'):
logger.log(u'Missing idx_status for TV Episodes table detected!, fixing...')
self.connection.action('CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)')
logger.log('Updating TV Episode table with index idx_status')
self.connection.action('CREATE INDEX idx_status ON tv_episodes (status, season, episode, airdate)')
if not self.connection.select('PRAGMA index_info("idx_sta_epi_air")'):
logger.log(u'Missing idx_sta_epi_air for TV Episodes table detected!, fixing...')
self.connection.action('CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)')
logger.log('Updating TV Episode table with index idx_sta_epi_air')
self.connection.action('CREATE INDEX idx_sta_epi_air ON tv_episodes (status, episode, airdate)')
if not self.connection.select('PRAGMA index_info("idx_sta_epi_sta_air")'):
logger.log(u'Missing idx_sta_epi_sta_air for TV Episodes table detected!, fixing...')
self.connection.action('CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)')
logger.log('Updating TV Episode table with index idx_sta_epi_sta_air')
self.connection.action('CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, airdate)')
if not self.connection.hasIndex('tv_episodes', 'idx_tv_ep_ids'):
logger.log('Updating TV Episode table with index idx_tv_ep_ids')
self.connection.action('CREATE INDEX idx_tv_ep_ids ON tv_episodes (indexer, showid)')
def fix_unaired_episodes(self):
@ -1241,3 +1245,16 @@ class AddShowNotFoundCounter(db.SchemaUpgrade):
self.setDBVersion(20005)
return self.checkDBVersion()
# 20005 -> 20006
class AddFlagTable(db.SchemaUpgrade):
def execute(self):
if not self.hasTable('flags'):
logger.log(u'Adding table flags')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL )')
self.setDBVersion(20006)
return self.checkDBVersion()

View file

@ -231,6 +231,20 @@ class DBConnection(object):
self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, type))
self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))
def has_flag(self, flag_name):
sql_result = self.select('SELECT flag FROM flags WHERE flag = ?', [flag_name])
if 0 < len(sql_result):
return True
return False
def add_flag(self, flag_name):
if not self.has_flag(flag_name):
self.action('INSERT INTO flags (flag) VALUES (?)', [flag_name])
def remove_flag(self, flag_name):
if self.has_flag(flag_name):
self.action('DELETE FROM flags WHERE flag = ?', [flag_name])
def close(self):
"""Close database connection"""
if getattr(self, 'connection', None) is not None:
@ -454,7 +468,8 @@ def MigrationCode(myDB):
20001: sickbeard.mainDB.AddTvShowOverview,
20002: sickbeard.mainDB.AddTvShowTags,
20003: sickbeard.mainDB.ChangeMapIndexer,
20004: sickbeard.mainDB.AddShowNotFoundCounter
20004: sickbeard.mainDB.AddShowNotFoundCounter,
20005: sickbeard.mainDB.AddFlagTable
# 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
}

View file

@ -19,50 +19,55 @@
import db
import datetime
from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality
import sickbeard
from sickbeard import helpers, logger
from sickbeard.common import SNATCHED, SNATCHED_PROPER, SUBTITLED, FAILED, Quality
from sickbeard.name_parser.parser import NameParser
dateFormat = "%Y%m%d%H%M%S"
dateFormat = '%Y%m%d%H%M%S'
def _logHistoryItem(action, showid, season, episode, quality, resource, provider, version=-1):
logDate = datetime.datetime.today().strftime(dateFormat)
def _log_history_item(action, showid, season, episode, quality, resource, provider, version=-1):
log_date = datetime.datetime.today().strftime(dateFormat)
if not isinstance(resource, unicode):
resource = unicode(resource, 'utf-8', 'replace')
myDB = db.DBConnection()
myDB.action(
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider, version) VALUES (?,?,?,?,?,?,?,?,?)",
[action, logDate, showid, season, episode, quality, resource, provider, version])
my_db = db.DBConnection()
my_db.action(
'INSERT INTO history (action, date, showid, season, episode, quality, resource, provider, version)'
' VALUES (?,?,?,?,?,?,?,?,?)',
[action, log_date, showid, season, episode, quality, resource, provider, version])
def logSnatch(searchResult):
for curEpObj in searchResult.episodes:
def log_snatch(search_result):
for curEpObj in search_result.episodes:
showid = int(curEpObj.show.indexerid)
season = int(curEpObj.season)
episode = int(curEpObj.episode)
quality = searchResult.quality
version = searchResult.version
quality = search_result.quality
version = search_result.version
is_proper = 0 < search_result.properlevel
providerClass = searchResult.provider
if providerClass != None:
provider = providerClass.name
provider_class = search_result.provider
if None is not provider_class:
provider = provider_class.name
else:
provider = "unknown"
provider = 'unknown'
action = Quality.compositeStatus(SNATCHED, searchResult.quality)
action = Quality.compositeStatus((SNATCHED, SNATCHED_PROPER)[is_proper], search_result.quality)
resource = searchResult.name
resource = search_result.name
_logHistoryItem(action, showid, season, episode, quality, resource, provider, version)
_log_history_item(action, showid, season, episode, quality, resource, provider, version)
def logDownload(episode, filename, new_ep_quality, release_group=None, version=-1):
def log_download(episode, filename, new_ep_quality, release_group=None, version=-1):
showid = int(episode.show.indexerid)
season = int(episode.season)
epNum = int(episode.episode)
ep_num = int(episode.episode)
quality = new_ep_quality
@ -74,53 +79,87 @@ def logDownload(episode, filename, new_ep_quality, release_group=None, version=-
action = episode.status
_logHistoryItem(action, showid, season, epNum, quality, filename, provider, version)
_log_history_item(action, showid, season, ep_num, quality, filename, provider, version)
def logSubtitle(showid, season, episode, status, subtitleResult):
resource = subtitleResult.path
provider = subtitleResult.service
def log_subtitle(showid, season, episode, status, subtitle_result):
resource = subtitle_result.path
provider = subtitle_result.service
status, quality = Quality.splitCompositeStatus(status)
action = Quality.compositeStatus(SUBTITLED, quality)
_logHistoryItem(action, showid, season, episode, quality, resource, provider)
_log_history_item(action, showid, season, episode, quality, resource, provider)
def logFailed(epObj, release, provider=None):
showid = int(epObj.show.indexerid)
season = int(epObj.season)
epNum = int(epObj.episode)
status, quality = Quality.splitCompositeStatus(epObj.status)
def log_failed(ep_obj, release, provider=None):
showid = int(ep_obj.show.indexerid)
season = int(ep_obj.season)
ep_num = int(ep_obj.episode)
status, quality = Quality.splitCompositeStatus(ep_obj.status)
action = Quality.compositeStatus(FAILED, quality)
_logHistoryItem(action, showid, season, epNum, quality, release, provider)
_log_history_item(action, showid, season, ep_num, quality, release, provider)
def reset_status(indexerid, season, episode):
''' Revert episode history to status from download history,
if history exists '''
""" Revert episode history to status from download history,
if history exists """
my_db = db.DBConnection()
history_sql = 'SELECT h.action, h.showid, h.season, h.episode,'\
' t.status FROM history AS h INNER JOIN tv_episodes AS t'\
' ON h.showid = t.showid AND h.season = t.season'\
' AND h.episode = t.episode WHERE t.showid = ? AND t.season = ?'\
' AND t.episode = ? GROUP BY h.action ORDER BY h.date DESC limit 1'
history_sql = 'SELECT h.action, h.showid, h.season, h.episode, t.status' \
' FROM history AS h' \
' INNER JOIN tv_episodes AS t' \
' ON h.showid = t.showid AND h.season = t.season AND h.episode = t.episode' \
' WHERE t.showid = ? AND t.season = ? AND t.episode = ?' \
' GROUP BY h.action' \
' ORDER BY h.date DESC' \
' LIMIT 1'
sql_history = my_db.select(history_sql, [str(indexerid),
str(season),
str(episode)])
if len(sql_history) == 1:
sql_history = my_db.select(history_sql, [str(indexerid), str(season), str(episode)])
if 1 == len(sql_history):
history = sql_history[0]
# update status only if status differs
# FIXME: this causes issues if the user changed status manually
# replicating refactored behavior anyway.
if history['status'] != history['action']:
undo_status = 'UPDATE tv_episodes SET status = ?'\
' WHERE showid = ? AND season = ? AND episode = ?'
undo_status = 'UPDATE tv_episodes SET status = ?' \
' WHERE showid = ? AND season = ? AND episode = ?'
my_db.action(undo_status, [history['action'],
history['showid'],
history['season'],
history['episode']])
def history_snatched_proper_fix():
my_db = db.DBConnection()
if not my_db.has_flag('history_snatch_proper'):
logger.log('Updating history items with status Snatched Proper in a background process...')
sql_result = my_db.select('SELECT rowid, resource, quality, showid'
' FROM history'
' WHERE action LIKE "%%%02d"' % SNATCHED +
' AND (UPPER(resource) LIKE "%PROPER%"'
' OR UPPER(resource) LIKE "%REPACK%"'
' OR UPPER(resource) LIKE "%REAL%")')
if sql_result:
cl = []
for r in sql_result:
show_obj = None
try:
show_obj = helpers.findCertainShow(sickbeard.showList, int(r['showid']))
except (StandardError, Exception):
pass
np = NameParser(False, showObj=show_obj, testing=True)
try:
pr = np.parse(r['resource'])
except (StandardError, Exception):
continue
if 0 < Quality.get_proper_level(pr.extra_info_no_name, pr.version, pr.is_anime):
cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
[Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])),
r['rowid']]])
if cl:
my_db.mass_action(cl)
logger.log('Completed the history table update with status Snatched Proper.')
my_db.add_flag('history_snatch_proper')

View file

@ -18,14 +18,21 @@
from __future__ import with_statement
import os
import time
import re
import datetime
import os
import os.path
import re
import time
import regexes
import sickbeard
try:
import regex
from math import trunc # positioned here to import only if regex is available
except ImportError:
regex = None
from sickbeard import logger, helpers, scene_numbering, common, scene_exceptions, encodingKludge as ek, db
from sickbeard.exceptions import ex
@ -53,6 +60,29 @@ class NameParser(object):
else:
self._compile_regexes(self.ALL_REGEX)
def _compile_regexes(self, regex_mode):
if self.ANIME_REGEX == regex_mode:
logger.log(u'Using ANIME regexs', logger.DEBUG)
uncompiled_regex = [regexes.anime_regexes]
elif self.NORMAL_REGEX == regex_mode:
logger.log(u'Using NORMAL regexs', logger.DEBUG)
uncompiled_regex = [regexes.normal_regexes]
else:
logger.log(u'Using ALL regexes', logger.DEBUG)
uncompiled_regex = [regexes.normal_regexes, regexes.anime_regexes]
self.compiled_regexes = {0: [], 1: []}
index = 0
for regexItem in uncompiled_regex:
for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
try:
cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
except re.error as errormsg:
logger.log(u'WARNING: Invalid episode_pattern, %s. %s' % (errormsg, cur_pattern))
else:
self.compiled_regexes[index].append([cur_pattern_num, cur_pattern_name, cur_regex])
index += 1
@staticmethod
def clean_series_name(series_name):
"""Cleans up series name by removing any . and _
@ -78,37 +108,14 @@ class NameParser(object):
series_name = re.sub('^\[.*\]', '', series_name)
return series_name.strip()
def _compile_regexes(self, regexMode):
if self.ANIME_REGEX == regexMode:
logger.log(u'Using ANIME regexs', logger.DEBUG)
uncompiled_regex = [regexes.anime_regexes]
elif self.NORMAL_REGEX == regexMode:
logger.log(u'Using NORMAL regexs', logger.DEBUG)
uncompiled_regex = [regexes.normal_regexes]
else:
logger.log(u'Using ALL regexes', logger.DEBUG)
uncompiled_regex = [regexes.normal_regexes, regexes.anime_regexes]
self.compiled_regexes = {0: [], 1: []}
index = 0
for regexItem in uncompiled_regex:
for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
try:
cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
except re.error as errormsg:
logger.log(u'WARNING: Invalid episode_pattern, %s. %s' % (errormsg, cur_pattern))
else:
self.compiled_regexes[index].append([cur_pattern_num, cur_pattern_name, cur_regex])
index += 1
def _parse_string(self, name):
if not name:
return
matches = []
for regex in self.compiled_regexes:
for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[regex]:
for reg_ex in self.compiled_regexes:
for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[reg_ex]:
new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name)
match = cur_regex.match(new_name)
@ -254,7 +261,7 @@ class NameParser(object):
show = self.showObj
best_result.show = show
if show and show.is_anime and 1 < len(self.compiled_regexes[1]) and 1 != regex:
if show and show.is_anime and 1 < len(self.compiled_regexes[1]) and 1 != reg_ex:
continue
# if this is a naming pattern test then return best result
@ -465,6 +472,32 @@ class NameParser(object):
return number
@staticmethod
def _replace_ep_name_helper(e_i_n_n, n):
ep_regex = r'\W*%s\W*' % re.sub(r' ', r'\W', re.sub(r'[^a-zA-Z0-9 ]', r'\W?',
re.sub(r'\W+$', '', n.strip())))
if None is regex:
return re.sub(ep_regex, '', e_i_n_n, flags=re.I)
return regex.sub(r'(%s){e<=%d}' % (
ep_regex, trunc(len(re.findall(r'\w', ep_regex)) / 5)), '', e_i_n_n, flags=regex.I | regex.B)
def _extra_info_no_name(self, extra_info, show, season, episodes):
extra_info_no_name = extra_info
if isinstance(extra_info_no_name, basestring) and show and hasattr(show, 'indexer'):
for e in episodes:
if not hasattr(show, 'getEpisode'):
continue
ep = show.getEpisode(season, e)
if ep and isinstance(getattr(ep, 'name', None), basestring) and ep.name.strip():
extra_info_no_name = self._replace_ep_name_helper(extra_info_no_name, ep.name)
if hasattr(show, 'getAllEpisodes'):
for e in [ep.name for ep in show.getAllEpisodes(check_related_eps=False) if getattr(ep, 'name', None)
and re.search(r'real|proper|repack', ep.name, re.I)]:
extra_info_no_name = self._replace_ep_name_helper(extra_info_no_name, e)
return extra_info_no_name
def parse(self, name, cache_result=True):
name = self._unicodify(name)
@ -525,6 +558,10 @@ class NameParser(object):
final_result.show = self._combine_results(file_name_result, dir_name_result, 'show')
final_result.quality = self._combine_results(file_name_result, dir_name_result, 'quality')
final_result.extra_info_no_name = self._extra_info_no_name(final_result.extra_info, final_result.show,
final_result.season_number,
final_result.episode_numbers)
if not final_result.show:
if self.testing:
pass
@ -557,7 +594,8 @@ class ParseResult(object):
show=None,
score=None,
quality=None,
version=None):
version=None,
extra_info_no_name=None):
self.original_name = original_name
@ -579,6 +617,7 @@ class ParseResult(object):
self.quality = quality
self.extra_info = extra_info
self.extra_info_no_name = extra_info_no_name
self.release_group = release_group
self.air_date = air_date

View file

@ -18,7 +18,6 @@ import datetime
import re
import sickbeard
import config
from lib.six import moves
from base64 import standard_b64encode
from common import Quality
@ -64,7 +63,7 @@ def test_nzbget(host, use_https, username, password):
return result, msg, rpc_client
def send_nzb(nzb, proper=False):
def send_nzb(nzb):
result = False
add_to_top = False
nzbget_prio = 0
@ -80,7 +79,7 @@ def send_nzb(nzb, proper=False):
# if it aired recently make it high priority and generate DupeKey/Score
for curEp in nzb.episodes:
if '' == dupekey:
dupekey = "SickGear-%s%s" % (
dupekey = 'SickGear-%s%s' % (
sickbeard.indexerApi(curEp.show.indexer).config.get('dupekey', ''), curEp.show.indexerid)
dupekey += '-%s.%s' % (curEp.season, curEp.episode)
@ -90,8 +89,8 @@ def send_nzb(nzb, proper=False):
if Quality.UNKNOWN != nzb.quality:
dupescore = nzb.quality * 100
if proper:
dupescore += 10
dupescore += (0, 9 + nzb.properlevel)[0 < nzb.properlevel]
nzbcontent64 = None
if 'nzbdata' == nzb.resultType:
@ -155,7 +154,7 @@ def send_nzb(nzb, proper=False):
result = True
else:
logger.log(u'NZBget could not add %s to the queue' % ('%s.nzb' % nzb.name), logger.ERROR)
except:
except(StandardError, Exception):
logger.log(u'Connect Error to NZBget: could not add %s to the queue' % ('%s.nzb' % nzb.name), logger.ERROR)
return result

View file

@ -504,8 +504,9 @@ class PostProcessor(object):
self.release_group = parse_result.release_group
# remember whether it's a proper
if parse_result.extra_info:
self.is_proper = None is not re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I)
if parse_result.extra_info_no_name:
self.is_proper = 0 < common.Quality.get_proper_level(parse_result.extra_info_no_name, parse_result.version,
parse_result.is_anime)
# if the result is complete then set release name
if parse_result.series_name and\
@ -775,10 +776,27 @@ class PostProcessor(object):
# if there's an existing downloaded file with same quality, check filesize to decide
if new_ep_quality == old_ep_quality:
if (isinstance(self.nzb_name, basestring) and re.search(r'\bproper|repack\b', self.nzb_name, re.I)) or \
(isinstance(self.file_name, basestring) and re.search(r'\bproper|repack\b', self.file_name, re.I)):
self._log(u'Proper or repack with same quality, marking it safe to replace', logger.DEBUG)
return True
np = NameParser(showObj=self.showObj)
cur_proper_level = 0
try:
pr = np.parse(ep_obj.release_name)
cur_proper_level = common.Quality.get_proper_level(pr.extra_info_no_name, pr.version, pr.is_anime)
except (StandardError, Exception):
pass
new_name = (('', self.file_name)[isinstance(self.file_name, basestring)], self.nzb_name)[isinstance(
self.nzb_name, basestring)]
if new_name:
try:
npr = np.parse(new_name)
except (StandardError, Exception):
npr = None
if npr:
is_repack, new_proper_level = common.Quality.get_proper_level(npr.extra_info_no_name, npr.version,
npr.is_anime, check_is_repack=True)
if new_proper_level > cur_proper_level and \
(not is_repack or npr.release_group == ep_obj.release_group):
self._log(u'Proper or repack with same quality, marking it safe to replace', logger.DEBUG)
return True
self._log(u'An episode exists in the database with the same quality as the episode to process', logger.DEBUG)
@ -1049,7 +1067,7 @@ class PostProcessor(object):
ep_obj.createMetaFiles()
# log it to history
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, anime_version)
history.log_download(ep_obj, self.file_path, new_ep_quality, self.release_group, anime_version)
# send notifications
notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

View file

@ -645,7 +645,7 @@ class ProcessTVShow(object):
'media_pattern': re.compile('|'.join([
r'\.s\d{2}e\d{2}\.', r'\.(?:36|72|216)0p\.', r'\.(?:480|576|1080)[pi]\.', r'\.[xh]26[45]\b',
r'\.bluray\.', r'\.[hp]dtv\.', r'\.web(?:[.-]?dl)?\.', r'\.(?:vhs|vod|dvd|web|bd|br).?rip\.',
r'\.dvdr\b', r'\.(?:stv|vcd)\.', r'\bhd(?:cam|rip)\b', r'\.(?:internal|proper|repack|screener)\.',
r'\.dvdr\b', r'\.(?:stv|vcd)\.', r'\bhd(?:cam|rip)\b', r'\.(?:internal|real|proper|repack|screener)\.',
r'\b(?:aac|ac3|mp3)\b', r'\.(?:ntsc|pal|secam)\.', r'\.r5\.', r'\bscr\b', r'\b(?:divx|xvid)\b'
]), flags=re.IGNORECASE)
}

View file

@ -27,8 +27,10 @@ import sickbeard
from sickbeard import db, exceptions, helpers, history, logger, search, show_name_helpers
from sickbeard import encodingKludge as ek
from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality, ARCHIVED, SNATCHED_BEST
from sickbeard.exceptions import ex
from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality, ARCHIVED, SNATCHED_BEST, FAILED
from sickbeard.exceptions import ex, MultipleShowObjectsException
from sickbeard import failed_history
from sickbeard.history import dateFormat
from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
@ -70,13 +72,56 @@ def search_propers():
logger.log(u'Completed the search for new propers%s' % run_at)
def get_old_proper_level(showObj, indexer, indexerid, season, episodes, old_status, new_quality,
extra_no_name, version, is_anime=False):
level = 0
is_internal = False
codec = ''
if old_status not in (SNATCHED, SNATCHED_BEST, SNATCHED_PROPER):
level = Quality.get_proper_level(extra_no_name, version, is_anime)
elif showObj:
myDB = db.DBConnection()
np = NameParser(False, showObj=showObj)
for episode in episodes:
result = myDB.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
'(' + ' OR '.join("action LIKE '%%%02d'" %
x for x in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)) + ') '
'ORDER BY date DESC LIMIT 1',
[indexerid, season, episode])
if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
continue
nq = Quality.sceneQuality(result[0]['resource'], showObj.is_anime)
if nq != new_quality:
continue
try:
p = np.parse(result[0]['resource'])
except (StandardError, Exception):
continue
level = Quality.get_proper_level(p.extra_info_no_name, p.version, showObj.is_anime)
is_internal = p.extra_info_no_name and re.search(r'\binternal\b', p.extra_info_no_name, flags=re.I)
codec = _get_codec(p.extra_info_no_name)
break
return level, is_internal, codec
def _get_codec(extra_info_no_name):
if not extra_info_no_name:
return ''
if re.search(r'\b[xh]264\b', extra_info_no_name, flags=re.I):
return '264'
elif re.search(r'\bxvid\b', extra_info_no_name, flags=re.I):
return 'xvid'
elif re.search(r'\b[xh]265|hevc\b', extra_info_no_name, flags=re.I):
return 'hevc'
return ''
def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
propers = {}
# for each provider get a list of the
orig_thread_name = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
np = NameParser(False, try_scene_exceptions=True, indexer_lookup=False)
for cur_provider in providers:
if not recent_anime and cur_provider.anime_only:
continue
@ -106,10 +151,20 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
np = NameParser(False, try_scene_exceptions=True, showObj=x.parsed_show, indexer_lookup=False)
parse_result = np.parse(x.name)
if parse_result.series_name and parse_result.episode_numbers and \
parse_result.show.indexerid in recent_shows + recent_anime:
(parse_result.show.indexer, parse_result.show.indexerid) in recent_shows + recent_anime:
cur_size = getattr(x, 'size', None)
if failed_history.has_failed(x.name, cur_size, cur_provider.name):
continue
logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
x.show = parse_result.show.indexerid
x.provider = cur_provider
x.is_repack, x.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name,
parse_result.version,
parse_result.is_anime,
check_is_repack=True)
x.is_internal = parse_result.extra_info_no_name and \
re.search(r'\binternal\b', parse_result.extra_info_no_name, flags=re.I)
x.codec = _get_codec(parse_result.extra_info_no_name)
propers[name] = x
count += 1
except (InvalidNameException, InvalidShowException):
@ -120,12 +175,16 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
cur_provider.log_result('Propers', count, '%s' % cur_provider.name)
# take the list of unique propers and get it sorted by
sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
verified_propers = []
sorted_propers = sorted(propers.values(), key=operator.attrgetter('properlevel', 'date'), reverse=True)
verified_propers = set()
for cur_proper in sorted_propers:
parse_result = np.parse(cur_proper.name)
np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
try:
parse_result = np.parse(cur_proper.name)
except (StandardError, Exception):
continue
# set the indexerid in the db to the show's indexerid
cur_proper.indexerid = parse_result.show.indexerid
@ -138,7 +197,10 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
cur_proper.episode = parse_result.episode_numbers[0]
cur_proper.release_group = parse_result.release_group
cur_proper.version = parse_result.version
cur_proper.extra_info = parse_result.extra_info
cur_proper.extra_info_no_name = parse_result.extra_info_no_name
cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
cur_proper.is_anime = parse_result.is_anime
# only get anime proper if it has release group and version
if parse_result.is_anime:
@ -166,25 +228,56 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
# check if we actually want this proper (if it's the right quality)
my_db = db.DBConnection()
sql_results = my_db.select(
'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
[cur_proper.indexerid, cur_proper.season, cur_proper.episode])
'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND indexer = ? ' +
'AND season = ? AND episode = ?',
[cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
if not sql_results:
continue
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
# don't take proper of the same level we already downloaded
old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
if old_status not in (DOWNLOADED, SNATCHED, SNATCHED_BEST, ARCHIVED) \
or cur_proper.quality != old_quality:
continue
cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(cur_proper.extra_info_no_name,
cur_proper.version,
cur_proper.is_anime,
check_is_repack=True)
old_release_group = sql_results[0]['release_group']
# check if we want this release: same quality as current, current has correct status
# restrict other release group releases to proper's
if old_status not in (DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, ARCHIVED) \
or cur_proper.quality != old_quality \
or (cur_proper.is_repack and cur_proper.release_group != old_release_group):
continue
np = NameParser(False, try_scene_exceptions=True, showObj=parse_result.show, indexer_lookup=False)
try:
extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name
except (StandardError, Exception):
extra_info = None
old_proper_level, old_is_internal, old_codec = get_old_proper_level(parse_result.show, cur_proper.indexer,
cur_proper.indexerid, cur_proper.season,
parse_result.episode_numbers, old_status,
cur_proper.quality, extra_info,
cur_proper.version, cur_proper.is_anime)
if cur_proper.proper_level < old_proper_level:
continue
elif cur_proper.proper_level == old_proper_level:
if '264' == cur_proper.codec and 'xvid' == old_codec:
pass
elif old_is_internal and not cur_proper.is_internal:
pass
else:
continue
log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
% (cur_proper.release_group, old_release_group, cur_proper.name)
# for webldls, prevent propers from different groups
if sickbeard.PROPERS_WEBDL_ONEGRP and \
(old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I))) and \
(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I))) and \
cur_proper.release_group != old_release_group:
logger.log(log_same_grp, logger.DEBUG)
continue
@ -204,12 +297,24 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
# if the show is in our list and there hasn't been a proper already added for that particular episode
# then add it to our list of propers
if cur_proper.indexerid != -1 and (cur_proper.indexerid, cur_proper.season, cur_proper.episode) not in map(
operator.attrgetter('indexerid', 'season', 'episode'), verified_propers):
logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
verified_propers.append(cur_proper)
if cur_proper.indexerid != -1:
if (cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode) not in map(
operator.attrgetter('indexerid', 'indexer', 'season', 'episode'), verified_propers):
logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
verified_propers.add(cur_proper)
else:
rp = set()
for vp in verified_propers:
if vp.indexer == cur_proper.indexer and vp.indexerid == cur_proper.indexerid and \
vp.season == cur_proper.season and vp.episode == cur_proper.episode and \
vp.proper_level < cur_proper.proper_level:
rp.add(vp)
if rp:
verified_propers = verified_propers - rp
logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
verified_propers.add(cur_proper)
return verified_propers
return list(verified_propers)
def _download_propers(proper_list):
@ -264,6 +369,9 @@ def _download_propers(proper_list):
result.name = cur_proper.name
result.quality = cur_proper.quality
result.version = cur_proper.version
result.properlevel = cur_proper.proper_level
result.is_repack = cur_proper.is_repack
result.puid = cur_proper.puid
# snatch it
search.snatch_episode(result, SNATCHED_PROPER)
@ -273,24 +381,29 @@ def _recent_history(aired_since_shows, aired_since_anime):
recent_shows, recent_anime = [], []
aired_since_shows = aired_since_shows.toordinal()
aired_since_anime = aired_since_anime.toordinal()
my_db = db.DBConnection()
sql_results = my_db.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
'SELECT DISTINCT s.indexer, s.indexer_id FROM history as h' +
' INNER JOIN tv_episodes AS e ON (h.showid == e.showid AND h.season == e.season AND h.episode == e.episode)' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= %s' % min(aired_since_shows, aired_since_anime) +
' AND (e.status IN (%s))' % ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED])
' WHERE h.date >= %s' % min(aired_since_shows, aired_since_anime).strftime(dateFormat) +
' AND (%s)' % ' OR '.join(['h.action LIKE "%%%02d"' % x for x in (DOWNLOADED, SNATCHED, SNATCHED_PROPER,
SNATCHED_BEST, FAILED)])
)
for sqlshow in sql_results:
show = helpers.findCertainShow(sickbeard.showList, sqlshow['showid'])
try:
show = helpers.find_show_by_id(sickbeard.showList, {int(sqlshow['indexer']): int(sqlshow['indexer_id'])})
except MultipleShowObjectsException:
continue
if show:
if sqlshow['airdate'] >= aired_since_shows and not show.is_anime:
sqlshow['showid'] not in recent_shows and recent_shows.append(sqlshow['showid'])
if not show.is_anime:
(sqlshow['indexer'], sqlshow['indexer_id']) not in recent_shows and \
recent_shows.append((sqlshow['indexer'], sqlshow['indexer_id']))
else:
sqlshow['showid'] not in recent_anime and show.is_anime and recent_anime.append(sqlshow['showid'])
(sqlshow['indexer'], sqlshow['indexer_id']) not in recent_anime and show.is_anime and \
recent_anime.append((sqlshow['indexer'], sqlshow['indexer_id']))
return recent_shows, recent_anime

View file

@ -418,6 +418,9 @@ class GenericProvider:
def get_show(self, item, **kwargs):
    # Base implementation: the generic provider cannot map a raw result item
    # to a show object; providers with richer metadata override this.
    return None
def get_size_uid(self, item, **kwargs):
    # Base implementation: (size in bytes, unique id) for a result item.
    # (-1, None) means "unknown"; providers with richer metadata override this.
    return -1, None
def find_search_results(self, show, episodes, search_mode, manual_search=False, **kwargs):
self._check_auth()
@ -585,6 +588,10 @@ class GenericProvider:
result.release_group = release_group
result.content = None
result.version = version
result.size, result.puid = self.get_size_uid(item, **kwargs)
result.is_repack, result.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name,
parse_result.version, show_obj.is_anime,
check_is_repack=True)
if 1 == len(ep_obj):
ep_num = ep_obj[0].episode
@ -754,8 +761,8 @@ class NZBProvider(object, GenericProvider):
search_terms = []
regex = []
if shows:
search_terms += ['.proper.', '.repack.']
regex += ['proper|repack']
search_terms += ['.proper.', '.repack.', '.real.']
regex += ['proper|repack', Quality.real_check]
proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
if anime:
terms = 'v1|v2|v3|v4|v5'
@ -1146,10 +1153,10 @@ class TorrentProvider(object, GenericProvider):
"""
results = []
search_terms = getattr(self, 'proper_search_terms', ['proper', 'repack'])
search_terms = getattr(self, 'proper_search_terms', ['proper', 'repack', 'real'])
if not isinstance(search_terms, list):
if None is search_terms:
search_terms = 'proper|repack'
search_terms = 'proper|repack|real'
search_terms = [search_terms]
items = self._search_provider({'Propers': search_terms})

View file

@ -28,7 +28,7 @@ from math import ceil
from sickbeard.sbdatetime import sbdatetime
from . import generic
from sickbeard import helpers, logger, scene_exceptions, tvcache, classes, db
from sickbeard.common import neededQualities
from sickbeard.common import neededQualities, Quality
from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.indexers.indexer_config import *
from io import BytesIO
@ -453,6 +453,13 @@ class NewznabProvider(generic.NZBProvider):
return title, url
def get_size_uid(self, item, **kwargs):
    """Return (size, uid) parsed from the item's newznab attributes.

    Falls back to (-1, None) when no usable newznab name space is supplied.
    """
    if 'name_space' in kwargs and 'newznab' in kwargs['name_space']:
        return self._parse_size_uid(item, kwargs['name_space'])
    return -1, None
def get_show(self, item, **kwargs):
show_obj = None
if 'name_space' in kwargs and 'newznab' in kwargs['name_space']:
@ -575,6 +582,21 @@ class NewznabProvider(generic.NZBProvider):
return parsed_date
@staticmethod
def _parse_size_uid(item, ns, default=-1):
    """Parse the newznab 'size' and 'guid' attributes of a result item.

    :param item: etree element for a single search result
    :param ns: name space mapping; must contain a 'newznab' key to be usable
    :param default: size value returned when absent or unparseable
    :return: tuple of (size, uid); uid is None when no guid attribute exists
    """
    parsed_size = default
    uid = None
    try:
        if ns and 'newznab' in ns:
            for attr in item.findall('%sattr' % ns['newznab']):
                if 'size' == attr.get('name', ''):
                    # honour the caller-supplied default on parse failure
                    # (previously hard-coded to -1, ignoring the parameter)
                    parsed_size = helpers.tryInt(attr.get('value'), default)
                elif 'guid' == attr.get('name', ''):
                    uid = attr.get('value')
    except (StandardError, Exception):
        pass
    return parsed_size, uid
def _search_provider(self, search_params, needed=neededQualities(need_all=True), max_items=400,
try_all_searches=False, **kwargs):
@ -751,8 +773,8 @@ class NewznabProvider(generic.NZBProvider):
search_terms = []
regex = []
if shows:
search_terms += ['.proper.', '.repack.']
regex += ['proper|repack']
search_terms += ['.proper.', '.repack.', '.real.']
regex += ['proper|repack', Quality.real_check]
proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
if anime:
terms = 'v1|v2|v3|v4|v5'
@ -789,9 +811,11 @@ class NewznabProvider(generic.NZBProvider):
logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
continue
result_size, result_uid = self._parse_size_uid(item, ns=n_space)
if not search_date or search_date < result_date:
show_obj = self.get_show(item, name_space=n_space)
search_result = classes.Proper(title, url, result_date, self.show, parsed_show=show_obj)
search_result = classes.Proper(title, url, result_date, self.show, parsed_show=show_obj,
size=result_size, puid=result_uid)
results.append(search_result)
time.sleep(0.5)

View file

@ -239,7 +239,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
def find_propers(self, **kwargs):
search_terms = ['.PROPER.', '.REPACK.']
search_terms = ['.PROPER.', '.REPACK.', '.REAL.']
results = []
for term in search_terms:

View file

@ -113,7 +113,7 @@ def snatch_episode(result, end_status=SNATCHED):
for cur_ep in result.episodes:
if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
result.priority = 1
if None is not re.search('(^|[. _-])(proper|repack)([. _-]|$)', result.name, re.I):
if 0 < result.properlevel:
end_status = SNATCHED_PROPER
# NZBs can be sent straight to SAB or saved to disk
@ -123,8 +123,7 @@ def snatch_episode(result, end_status=SNATCHED):
elif 'sabnzbd' == sickbeard.NZB_METHOD:
dl_result = sab.send_nzb(result)
elif 'nzbget' == sickbeard.NZB_METHOD:
is_proper = True if SNATCHED_PROPER == end_status else False
dl_result = nzbget.send_nzb(result, is_proper)
dl_result = nzbget.send_nzb(result)
else:
logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
dl_result = False
@ -159,7 +158,7 @@ def snatch_episode(result, end_status=SNATCHED):
ui.notifications.message(u'Episode snatched', result.name)
history.logSnatch(result)
history.log_snatch(result)
# don't notify when we re-download an episode
sql_l = []
@ -230,14 +229,15 @@ def pick_best_result(results, show, quality_list=None):
best_result = cur_result
elif best_result.quality == cur_result.quality:
if re.search('(?i)(proper|repack)', cur_result.name) or \
show.is_anime and re.search('(?i)(v1|v2|v3|v4|v5)', cur_result.name):
best_result = cur_result
elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower():
best_result = cur_result
elif 'xvid' in best_result.name.lower() and 'x264' in cur_result.name.lower():
logger.log(u'Preferring (x264 over xvid) [%s]' % cur_result.name)
if cur_result.properlevel > best_result.properlevel and \
(not cur_result.is_repack or cur_result.release_group == best_result.release_group):
best_result = cur_result
elif cur_result.properlevel == best_result.properlevel:
if 'xvid' in best_result.name.lower() and 'x264' in cur_result.name.lower():
logger.log(u'Preferring (x264 over xvid) [%s]' % cur_result.name)
best_result = cur_result
elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower():
best_result = cur_result
if best_result:
logger.log(u'Picked as the best [%s]' % best_result.name, logger.DEBUG)

View file

@ -465,7 +465,7 @@ class FailedQueueItem(generic_queue.QueueItem):
failed_history.revert_episode(ep_obj)
if release:
failed_history.add_failed(release)
history.logFailed(ep_obj, release, provider)
history.log_failed(ep_obj, release, provider)
logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())

View file

@ -311,37 +311,43 @@ class TVShow(object):
self.episodes[curSeason][curEp] = None
del myEp
def getAllEpisodes(self, season=None, has_location=False):
def getAllEpisodes(self, season=None, has_location=False, check_related_eps=True):
sql_selection = 'SELECT season, episode, '
sql_selection = 'SELECT season, episode'
# subselection to detect multi-episodes early, share_location > 0
sql_selection = sql_selection + ' (SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid AND season = tve.season AND location != "" AND location = tve.location AND episode != tve.episode) AS share_location '
if check_related_eps:
# subselection to detect multi-episodes early, share_location > 0
sql_selection += ' , (SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid AND season = ' \
'tve.season AND location != "" AND location = tve.location AND episode != tve.episode) ' \
'AS share_location '
sql_selection = sql_selection + ' FROM tv_episodes tve WHERE showid = ' + str(self.indexerid)
sql_selection += ' FROM tv_episodes tve WHERE indexer = ? AND showid = ?'
sql_parameter = [self.indexer, self.indexerid]
if season is not None:
sql_selection = sql_selection + ' AND season = ' + str(season)
sql_selection += ' AND season = ?'
sql_parameter += [season]
if has_location:
sql_selection = sql_selection + ' AND location != "" '
sql_selection += ' AND location != "" '
# need ORDER episode ASC to rename multi-episodes in order S01E01-02
sql_selection = sql_selection + ' ORDER BY season ASC, episode ASC'
sql_selection += ' ORDER BY season ASC, episode ASC'
myDB = db.DBConnection()
results = myDB.select(sql_selection)
results = myDB.select(sql_selection, sql_parameter)
ep_list = []
for cur_result in results:
cur_ep = self.getEpisode(int(cur_result['season']), int(cur_result['episode']))
if cur_ep:
cur_ep.relatedEps = []
if cur_ep.location:
if check_related_eps and cur_ep.location:
# if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps
if cur_result['share_location'] > 0:
related_eps_result = myDB.select(
'SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC',
'SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND '
'episode != ? ORDER BY episode ASC',
[self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode])
for cur_related_ep in related_eps_result:
related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
@ -351,7 +357,6 @@ class TVShow(object):
return ep_list
def getEpisode(self, season=None, episode=None, file=None, noCreate=False, absolute_number=None, forceUpdate=False):
# if we get an anime get the real season and episode
@ -1690,7 +1695,7 @@ class TVEpisode(object):
if sickbeard.SUBTITLES_HISTORY:
for video in subtitles:
for subtitle in subtitles.get(video):
history.logSubtitle(self.show.indexerid, self.season, self.episode, self.status, subtitle)
history.log_subtitle(self.show.indexerid, self.season, self.episode, self.status, subtitle)
return subtitles

View file

@ -258,7 +258,8 @@ class TVCache:
def listPropers(self, date=None):
myDB = self.get_db()
sql = "SELECT * FROM provider_cache WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%' AND provider = ?"
sql = "SELECT * FROM provider_cache WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%' " \
"OR name LIKE '%.REAL.%' AND provider = ?"
if date:
sql += ' AND time >= ' + str(int(time.mktime(date.timetuple())))

View file

@ -5,6 +5,7 @@ import os.path
sys.path.insert(1, os.path.abspath('..'))
from sickbeard import common
from sickbeard.name_parser.parser import NameParser
class QualityTests(unittest.TestCase):
@ -14,6 +15,13 @@ class QualityTests(unittest.TestCase):
second = common.Quality.nameQuality(fn)
self.assertEqual(quality, second, 'fail %s != %s for case: %s' % (quality, second, fn))
def check_proper_level(self, cases, is_anime=False):
    """Assert Quality.get_proper_level for each (release_name, expected level) pair."""
    parser = NameParser(False, indexer_lookup=False, try_scene_exceptions=False, testing=True)
    for case, level in cases:
        parsed = parser.parse(case)
        second = common.Quality.get_proper_level(parsed.extra_info_no_name, parsed.version, is_anime)
        self.assertEqual(level, second, 'fail %s != %s for case: %s' % (level, second, case))
# TODO: repack / proper ? air-by-date ? season rip? multi-ep?
def test_SDTV(self):
@ -159,6 +167,39 @@ class QualityTests(unittest.TestCase):
self.check_quality_names(common.Quality.FULLHDBLURAY, ['Test Show - S01E02 - 1080p BluRay - GROUP'])
self.check_quality_names(common.Quality.UNKNOWN, ['Test Show - S01E02 - Unknown - SiCKGEAR'])
def test_get_proper_level(self):
    """Check Quality.get_proper_level against known release names.

    From the cases below, each of PROPER / REPACK / REAL present in the
    name appears to add one level (0 = plain release) -- TODO confirm
    exact weighting against Quality.get_proper_level.
    """
    # (release_name, expected level)
    self.check_proper_level([
        ('Test.Show.S01E13.PROPER.REPACK.720p.HDTV.x264-GROUP', 2),
        ('Test.Show.S01E13.720p.WEBRip.AAC2.0.x264-GROUP', 0),
        ('Test.Show.S01E13.PROPER.720p.HDTV.x264-GROUP', 1),
        ('Test.Show.S03E09-E10.REAL.PROPER.720p.HDTV.x264-GROUP', 2),
        ('Test.Show.S01E07.REAL.PROPER.1080p.WEB.x264-GROUP', 2),
        ('Test.Show.S13E20.REAL.REPACK.720p.HDTV.x264-GROUP', 2),
        ('Test.Show.S02E04.REAL.HDTV.x264-GROUP', 1),
        ('Test.Show.S01E10.Episode.Name.HDTV.x264-GROUP', 0),
        ('Test.Show.S12E10.1080p.WEB.x264-GROUP', 0),
        ('Test.Show.S03E01.Real.720p.WEB-DL.DD5.1.H.264-GROUP', 1),
        ('Test.Show.S04E06.REAL.PROPER.RERIP.720p.WEBRip.X264-GROUP', 2),
        ('Test.Show.S01E09.REPACK.REAL.PROPER.HDTV.XviD-GROUP.[SOMETHING].GROUP', 3),
        ('Test.Show.S01E13.REPACK.REAL.PROPER.720p.HDTV.x264-GROUP', 3),
        ('Test.Show.S01E06.The.Episode.Name.PROPER.480p.BluRay.x264-GROUP', 1),
        ('Test.Show.S01E19.PROPER.1080p.BluRay.x264-GROUP', 1),
        ('Test.Show.S01E03.REAL.PROPER.720p.BluRay.x264-GROUP', 2),
        ('Test.Show.S03E09.Episode.Name.720p.HDTV.x264-GROUP', 0),
        ('Test.Show.S02E07.PROPER.HDTV.x264-GROUP', 1),
        ('Test.Show.S02E12.REAL.REPACK.DSR.XviD-GROUP', 2),
        ('Test.Show Part2.REAL.AC3.WS DVDRip XviD-GROUP', 1),
        ('Test.Show.S01E02.Some.episode.title.REAL.READ.NFO.DVDRip.XviD-GROUP', 1)
    ])
# TODO: add anime test cases
def test_get_proper_level_anime(self):
    """Anime proper-level checks -- currently no cases defined (TODO: add some)."""
    # (release_name, expected level)
    self.check_proper_level([
    ], is_anime=True)
if __name__ == '__main__':
    # run the quality tests when executed directly
    test_suite = unittest.TestLoader().loadTestsFromTestCase(QualityTests)
    unittest.TextTestRunner(verbosity=2).run(test_suite)