Mirror of https://github.com/SickGear/SickGear.git, synced 2025-01-07 02:23:38 +00:00
Merge branch 'origin/dev'
This commit is contained in: commit 887107284e
2 changed files with 16 additions and 18 deletions
@@ -104,6 +104,8 @@ def hasFailed(release, size, provider="%"):
     is found with any provider.
     """
 
+    release = prepareFailedName(release)
+
     myDB = db.DBConnection("failed.db")
     sql_results = myDB.select(
         "SELECT * FROM failed WHERE release=? AND size=? AND provider LIKE ?",
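Note on the hunk above: hasFailed() now runs the incoming name through prepareFailedName() before querying failed.db, so lookups use the same normalized key that inserts use, and the default provider="%" makes the LIKE clause match any provider. prepareFailedName() itself is not shown in this diff; the sketch below is a simplified, hypothetical stand-in using the stdlib sqlite3 module to illustrate the lookup.

    import re
    import sqlite3

    def prepare_failed_name(release):
        # Hypothetical normalization; the real prepareFailedName() may differ.
        # Drop a trailing .nzb/.torrent, collapse separators, and lowercase so
        # the same release always maps to one key in failed.db.
        fixed = re.sub(r'\.(nzb|torrent)$', '', release, flags=re.IGNORECASE)
        fixed = re.sub(r'[.\-+ ]+', '.', fixed).strip('.')
        return fixed.lower()

    def has_failed(conn, release, size, provider='%'):
        # provider defaults to the SQL wildcard '%', so "provider LIKE ?"
        # matches any provider unless a specific name is given.
        release = prepare_failed_name(release)
        rows = conn.execute(
            "SELECT * FROM failed WHERE release=? AND size=? AND provider LIKE ?",
            [release, size, provider]).fetchall()
        return len(rows) > 0

    # usage sketch: conn = sqlite3.connect('failed.db'); has_failed(conn, 'Show.S01E01.720p-GRP.nzb', 1234567)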
@@ -136,9 +138,6 @@ def revertEpisode(epObj):
     except EpisodeNotFoundException, e:
         logger.log(u"Unable to create episode, please set its status manually: " + ex(e),
                    logger.WARNING)
-
-        return
-
 
 def markFailed(epObj):
     log_str = u""
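Side note on the context lines above: the "except EpisodeNotFoundException, e:" form is the Python 2-only comma syntax for binding an exception. The "as" form means the same thing and also parses on Python 2.6+ and Python 3; a trivial self-contained example (not SickGear code):

    # Python 2 wrote "except ValueError, e:"; the "as" form below is equivalent
    # and also works on Python 3.
    try:
        raise ValueError("episode not found")
    except ValueError as e:
        print("Unable to create episode, please set its status manually: %s" % e)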
@@ -161,7 +160,6 @@ def logSnatch(searchResult):
     logDate = datetime.datetime.today().strftime(dateFormat)
     release = prepareFailedName(searchResult.name)
 
-
     providerClass = searchResult.provider
     if providerClass is not None:
         provider = providerClass.name
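For context on logSnatch(): it timestamps the snatch with a module-level dateFormat string and only reads provider.name when the result actually carries a provider object. A minimal sketch of that pattern, assuming dateFormat is a sortable strftime pattern such as '%Y%m%d%H%M%S' (an assumption; the real constant is defined elsewhere in SickGear):

    import datetime

    dateFormat = '%Y%m%d%H%M%S'  # assumed value; defined elsewhere in the real module

    def snatch_log_fields(search_result):
        # Collect the values an INSERT into the snatch history would need.
        log_date = datetime.datetime.today().strftime(dateFormat)
        release = search_result.name  # the real code normalizes this with prepareFailedName()

        provider_class = search_result.provider
        # Some results carry no provider object, so guard before reading .name.
        provider = provider_class.name if provider_class is not None else "unknown"
        return log_date, release, provider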
@@ -192,7 +190,6 @@ def trimHistory():
     myDB.action("DELETE FROM history WHERE date < " + str(
         (datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))
 
-
 def findRelease(epObj):
     """
     Find releases in history by show ID and season.
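The trimHistory() hunk deletes rows whose stored date falls before a 30-day cutoff. Comparing strftime output like this only behaves as a date comparison when the format is zero-padded and most-significant-first; a small sketch under that assumption (dateFormat as '%Y%m%d%H%M%S', which is an assumption here), using a bound parameter instead of string concatenation:

    import datetime
    import sqlite3

    dateFormat = '%Y%m%d%H%M%S'  # assumed; zero-padded, year-first, so values sort like dates

    def trim_history(conn, days=30):
        # strftime yields a digit string like 20250107022338; cast to int so the
        # comparison matches the unquoted numeric literal used in the diff.
        cutoff = (datetime.datetime.today() - datetime.timedelta(days=days)).strftime(dateFormat)
        conn.execute("DELETE FROM history WHERE date < ?", [int(cutoff)])
        conn.commit()

Binding the cutoff as a parameter is a safer equivalent of the concatenation in the diff; the resulting DELETE is the same.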
@@ -515,22 +515,23 @@ def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manua
                     continue
 
                 result = pickBestResult(foundResults[provider.name][curEp], show)
-                finalResults.append(result)
+                if result:
+                    finalResults.append(result)
 
                 logger.log(u"Checking if we should snatch " + result.name, logger.DEBUG)
                 any_qualities, best_qualities = Quality.splitQuality(show.quality)
 
                 # if there is a redownload that's higher than this then we definitely need to keep looking
                 if best_qualities and result.quality == max(best_qualities):
                     logger.log(u"Found a highest quality archive match to snatch [" + result.name + "]", logger.DEBUG)
                     queueItem.results = [result]
                     return queueItem
 
                 # if there's no redownload that's higher (above) and this is the highest initial download then we're good
                 elif any_qualities and result.quality in any_qualities:
                     logger.log(u"Found a initial quality match to snatch [" + result.name + "]", logger.DEBUG)
                     queueItem.results = [result]
                     return queueItem
 
     # remove duplicates and insures snatch of highest quality from results
     for i1, result1 in enumerate(finalResults):
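The substantive change in the last hunk is the new if result: guard. pickBestResult() can return None when every candidate for an episode is filtered out, and the old code appended that None straight into finalResults, where the later de-duplication loop would trip over it. A small self-contained illustration of the guard (the picker below is a stand-in, not the real pickBestResult):

    from collections import namedtuple

    Result = namedtuple('Result', 'name quality')

    def pick_best_result(candidates):
        # Stand-in for pickBestResult: highest quality wins, None if nothing usable.
        return max(candidates, key=lambda r: r.quality) if candidates else None

    found_per_episode = [
        [Result('Show.S01E01.720p', 4), Result('Show.S01E01.SDTV', 1)],
        [],  # an episode where every candidate was filtered out
    ]

    final_results = []
    for candidates in found_per_episode:
        result = pick_best_result(candidates)
        if result:  # the guard this commit adds: skip episodes with no usable result
            final_results.append(result)

    print([r.name for r in final_results])  # only the real result is kept, never None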