Merge remote-tracking branch 'origin/dev'

echel0n 2014-06-26 17:17:11 -07:00
commit 335cc55380
6 changed files with 31 additions and 9 deletions


@@ -27,6 +27,8 @@ from sickbeard import db
 from sickbeard import common
 from sickbeard import helpers
 from sickbeard import exceptions
+from sickbeard.exceptions import ex

 class DailySearcher():
     def __init__(self):
@@ -38,6 +40,23 @@ class DailySearcher():

         self.amActive = True

+        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and not x.backlog_only]
+        for curProviderCount, curProvider in enumerate(providers):
+            try:
+                logger.log(u"Updating [" + curProvider.name + "] RSS cache ...")
+                curProvider.cache.updateCache()
+            except exceptions.AuthException, e:
+                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
+                if curProviderCount != len(providers):
+                    continue
+                break
+            except Exception, e:
+                logger.log(u"Error while updating cache for " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
+                if curProviderCount != len(providers):
+                    continue
+                break
+
         logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")

         fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
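
The loop added above refreshes every active provider's RSS cache before the daily search runs, and a failure in one provider is logged and skipped rather than aborting the rest. A minimal standalone sketch of that skip-on-error pattern, using a hypothetical FakeProvider in place of sickbeard's real provider objects:

# Minimal sketch, not SickBeard's actual classes: each cache update is wrapped
# in its own try/except so one broken provider cannot stop the others.
class FakeProvider(object):
    def __init__(self, name, should_fail=False):
        self.name = name
        self.should_fail = should_fail

    def update_cache(self):
        if self.should_fail:
            raise RuntimeError("feed unreachable")

providers = [FakeProvider("alpha"), FakeProvider("beta", should_fail=True), FakeProvider("gamma")]
for provider in providers:
    try:
        provider.update_cache()
        print("updated cache for " + provider.name)
    except Exception as e:
        print("error while updating cache for " + provider.name + ", skipping: " + str(e))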


@ -130,12 +130,8 @@ class DBConnection(object):
sqlResult = [] sqlResult = []
attempt = 0 attempt = 0
# Transaction
#self.execute('BEGIN')
while attempt < 5: while attempt < 5:
try: try:
for qu in querylist: for qu in querylist:
if len(qu) == 1: if len(qu) == 1:
if logTransaction: if logTransaction:
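
The context around this deletion is a batch helper that retries the whole query list up to five times. A rough standalone sketch of that retry shape against sqlite3, loosely modeled on the while attempt < 5 loop shown above (the function name, table, and statements here are illustrative, not the project's own):

import sqlite3
import time

# Sketch only: run a list of [sql, params] pairs, retrying the whole batch a
# few times if the database is momentarily locked.
def run_batch(conn, querylist, attempts=5):
    for attempt in range(attempts):
        try:
            for qu in querylist:
                conn.execute(qu[0], qu[1] if len(qu) > 1 else [])
            conn.commit()
            return
        except sqlite3.OperationalError as e:
            print("database error, retrying: " + str(e))
            time.sleep(1)

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE demo (name TEXT)")
run_batch(conn, [["INSERT INTO demo (name) VALUES (?)", ["example"]]])
print(conn.execute("SELECT COUNT(*) FROM demo").fetchone()[0])  # 1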


@@ -249,7 +249,7 @@ class NameParser(object):
                 matches.append(result)

         if len(matches):
-            result = max(matches, key=lambda x: x.score)
+            result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)

             if result.show:
                 if self.convert and not self.naming_pattern:
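
The one-line change keeps max() for the highest score but pre-sorts the candidates, so a tie on score is now broken by which_regex instead of by whichever match happened to be appended first. A tiny sketch of that max-over-sorted tie-breaking, with a made-up Result standing in for the parser's result objects:

from collections import namedtuple

# Illustrative only: max() returns the first maximal element it sees, so sorting
# the candidates first decides which one wins when scores are tied.
Result = namedtuple("Result", "which_regex score")
matches = [Result(which_regex=["standard"], score=3), Result(which_regex=["verbose"], score=3)]

first_wins = max(matches, key=lambda x: x.score)
tie_broken = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)

print(first_wins.which_regex)  # ['standard'] - whichever came first in the list
print(tie_broken.which_regex)  # ['verbose']  - the larger which_regex wins the tie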


@@ -76,7 +76,7 @@ normal_regexes = {'normal':[
                      (?P<ep_num>\d+)                        # 02 and separator
                      (([. _-]*x|-)                          # linking x/- char
                      (?P<extra_ep_num>
-                     (?!(1080|720|480)[pi])(?!(?<=x)264)    # ignore obviously wrong multi-eps
+                     (?!(1080|720|480)[pi])(?!(?<=[hx])264) # ignore obviously wrong multi-eps
                      \d+))*                                 # additional x03/etc
                      [\]. _-]*((?P<extra_info>.+?)          # Source_Quality_Etc-
                      ((?<![. _-])(?<!WEB)                   # Make sure this is really the release group
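
The regex change only widens the lookbehind from x to [hx]; because it sits inside a negative lookahead, the extra_ep_num group now also refuses to read the 264 of h264, not just x264, as a second episode number. A quick standalone check of the changed fragment:

import re

# Standalone check of the widened lookbehind: inside the surrounding (?!...)
# guard, anything this matches is rejected as an episode number.
old_guard = re.compile(r"(?<=x)264")     # matches the 264 of x264 only
new_guard = re.compile(r"(?<=[hx])264")  # matches the 264 of x264 and h264

print(bool(old_guard.search("x264")), bool(old_guard.search("h264")))  # True False
print(bool(new_guard.search("x264")), bool(new_guard.search("h264")))  # True True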


@@ -355,8 +355,6 @@ def searchForNeededEpisodes(show, episodes):
         threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

         try:
-            logger.log(u"Updating RSS cache ...")
-            curProvider.cache.updateCache()
             logger.log(u"Searching RSS cache ...")
             curFoundResults = curProvider.searchRSS(episodes)
         except exceptions.AuthException, e:


@@ -54,6 +54,9 @@ class CacheDBConnection(db.DBConnection):
             if not self.hasTable(providerName):
                 self.action(
                     "CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT)")
+            else:
+                # remove duplicates
+                self.action("DELETE FROM " + providerName + " WHERE url NOT IN (SELECT url FROM " + providerName + " GROUP BY url)")
         except Exception, e:
             if str(e) != "table [" + providerName + "] already exists":
                 raise
@@ -67,6 +70,12 @@ class CacheDBConnection(db.DBConnection):
                 raise

+        # Create unique index for provider table to prevent duplicate entries
+        try:
+            self.action("CREATE UNIQUE INDEX IF NOT EXISTS idx_url ON " + providerName + " (url)")
+        except Exception, e:
+            raise
+

 class TVCache():
     def __init__(self, provider):
@@ -306,7 +315,7 @@ class TVCache():
             logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

             return [
-                "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
+                "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
                 [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality]]
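
Taken together, the tvcache changes aim to make duplicate cache rows impossible: existing duplicates are cleared, a unique index is created on url, and new rows are written with INSERT OR IGNORE so repeats become no-ops. A self-contained sqlite3 sketch of that scheme; the provider_cache table name and the rowid-based cleanup below are my illustration, not the project's exact statements:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE provider_cache (name TEXT, url TEXT, time NUMERIC)")

# Seed the table with a pre-existing duplicate, then keep only one row per url
# (rowid-based cleanup shown as one illustrative way to do it).
conn.executemany("INSERT INTO provider_cache (name, url, time) VALUES (?,?,?)",
                 [("Show.S01E01", "http://example.com/nzb/1", 1),
                  ("Show.S01E01", "http://example.com/nzb/1", 2)])
conn.execute("DELETE FROM provider_cache WHERE rowid NOT IN "
             "(SELECT MIN(rowid) FROM provider_cache GROUP BY url)")

# Unique index plus INSERT OR IGNORE: re-adding the same url is silently skipped.
conn.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_url ON provider_cache (url)")
for _ in range(3):
    conn.execute("INSERT OR IGNORE INTO provider_cache (name, url, time) VALUES (?,?,?)",
                 ("Show.S01E01", "http://example.com/nzb/1", 3))

print(conn.execute("SELECT COUNT(*) FROM provider_cache").fetchone()[0])  # 1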