Fixed issue with tvcache filling with duplicate data.

This commit is contained in:
echel0n 2014-06-26 03:20:07 -07:00
parent 9d8f695e5a
commit 9f8c49ce88
3 changed files with 8 additions and 6 deletions

View file

@@ -130,12 +130,8 @@ class DBConnection(object):
sqlResult = []
attempt = 0
# Transaction
#self.execute('BEGIN')
while attempt < 5:
try:
for qu in querylist:
if len(qu) == 1:
if logTransaction:

View file

@@ -76,7 +76,7 @@ normal_regexes = {'normal':[
(?P<ep_num>\d+) # 02 and separator
(([. _-]*x|-) # linking x/- char
(?P<extra_ep_num>
(?!(1080|720|480)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps
(?!(1080|720|480)[pi])(?!(?<=[hx])264) # ignore obviously wrong multi-eps
\d+))* # additional x03/etc
[\]. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group

View file

@@ -67,6 +67,12 @@ class CacheDBConnection(db.DBConnection):
raise
# Create unique index for provider table to prevent duplicate entries
try:
self.action("CREATE UNIQUE INDEX IF NOT EXISTS idx_url ON " + providerName + " (url)")
except Exception, e:
raise
class TVCache():
def __init__(self, provider):
@@ -306,7 +312,7 @@ class TVCache():
logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
return [
"INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
"INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality]]