mirror of
https://github.com/SickGear/SickGear.git
synced 2025-01-07 10:33:38 +00:00
Change to improve the integrity of the already post processed video checker.
This reduces the likelihood that new files are skipped when the video does not actually exist in the target location.
This commit is contained in:
parent
76ad144745
commit
e92ac49cea
2 changed files with 14 additions and 0 deletions
|
@ -9,6 +9,7 @@
|
||||||
* Change startup code cleanup and PEP8
|
* Change startup code cleanup and PEP8
|
||||||
* Change authentication credentials to display more securely on config pages
|
* Change authentication credentials to display more securely on config pages
|
||||||
* Add a "Use as default home page" selector to General Config/Interface/User Interface
|
* Add a "Use as default home page" selector to General Config/Interface/User Interface
|
||||||
|
* Change to improve the integrity of the already post processed video checker
|
||||||
* Add Kodi notifier and metadata
|
* Add Kodi notifier and metadata
|
||||||
* Add priority, device, and sound support to Pushover notifier (port from midgetspy/sickbeard)
|
* Add priority, device, and sound support to Pushover notifier (port from midgetspy/sickbeard)
|
||||||
* Fix updating of pull requests
|
* Fix updating of pull requests
|
||||||
|
|
|
@ -380,6 +380,19 @@ def already_postprocessed(dirName, videofile, force):
|
||||||
#Needed if we have downloaded the same episode @ different quality
|
#Needed if we have downloaded the same episode @ different quality
|
||||||
search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
|
search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
|
||||||
search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
|
search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
|
||||||
|
|
||||||
|
np = NameParser(dirName, tryIndexers=True, convert=True)
|
||||||
|
try:
|
||||||
|
parse_result = np.parse(dirName)
|
||||||
|
except:
|
||||||
|
parse_result = False
|
||||||
|
pass
|
||||||
|
|
||||||
|
if parse_result and (parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
|
||||||
|
search_sql += " and tv_episodes.showid = '" + str(parse_result.show.indexerid)\
|
||||||
|
+ "' and tv_episodes.season = '" + str(parse_result.season_number)\
|
||||||
|
+ "' and tv_episodes.episode = '" + str(parse_result.episode_numbers[0]) + "'"
|
||||||
|
|
||||||
search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
|
search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
|
||||||
search_sql += " and history.resource LIKE ?"
|
search_sql += " and history.resource LIKE ?"
|
||||||
sqlResult = myDB.select(search_sql, [u'%' + videofile])
|
sqlResult = myDB.select(search_sql, [u'%' + videofile])
|
||||||
|
|
Loading…
Reference in a new issue