# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os
import time
import datetime
import threading

import sickbeard

from sickbeard import db
from sickbeard import logger
from sickbeard.common import Quality
from sickbeard import helpers, show_name_helpers
from sickbeard.exceptions import MultipleShowObjectsException
from sickbeard.exceptions import AuthException
from name_parser.parser import NameParser, InvalidNameException
from sickbeard.rssfeeds import RSSFeeds

cache_lock = threading.Lock()


class CacheDBConnection(db.DBConnection):
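    # Connection to cache.db that makes sure the provider's cache table,
    # its unique index on url, and the lastUpdate table all exist.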
    def __init__(self, providerName):
        db.DBConnection.__init__(self, "cache.db")

        # Create the table if it's not already there
        try:
            if not self.hasTable(providerName):
                self.action(
                    "CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT)")
            else:
                sqlResults = self.select(
                    "SELECT url, COUNT(url) as count FROM [" + providerName + "] GROUP BY url HAVING count > 1")

                for cur_dupe in sqlResults:
                    self.action("DELETE FROM [" + providerName + "] WHERE url = ?", [cur_dupe["url"]])

            # add a unique index on url, if one does not already exist, to prevent further dupes
            self.action("CREATE UNIQUE INDEX IF NOT EXISTS idx_url ON [" + providerName + "] (url)")
        except Exception as e:
            if str(e) != "table [" + providerName + "] already exists":
                raise

        # Create the lastUpdate table if it's not already there
        try:
            if not self.hasTable('lastUpdate'):
                self.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")
        except Exception as e:
            if str(e) != "table lastUpdate already exists":
                raise

    def __del__(self):
        pass


class TVCache():
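    # Base class for per-provider RSS caches: updateCache() pulls the
    # provider's feed and stores parsed items in the provider's table in
    # cache.db, while findNeededEpisodes() matches cached results against
    # the episodes we actually want.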
    def __init__(self, provider):
        self.provider = provider
        self.providerID = self.provider.getID()
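        # minimum number of minutes between two cache updates (see shouldUpdate)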
        self.minTime = 10

    def __del__(self):
        pass

    def _getDB(self):
        return CacheDBConnection(self.providerID)

    def _clearCache(self):
        if self.shouldClearCache():
            logger.log(u"Clearing " + self.provider.name + " cache")
            curDate = datetime.date.today() - datetime.timedelta(weeks=1)

            myDB = self._getDB()
            myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])

            # clear RSS Feed cache
            RSSFeeds(self.providerID).clearCache()
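
    # The methods below are stubs; provider-specific caches override them.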

    def _getRSSData(self):
        data = None
        return data

    def _checkAuth(self, data):
        return True

    def _checkItemAuth(self, title, url):
        return True

    def updateCache(self):
        if self.shouldUpdate() and self._checkAuth(None):
            self._clearCache()

            data = self._getRSSData()

            # as long as the http request worked we count this as an update
            if data:
                self.setLastUpdate()
            else:
                return []

            if self._checkAuth(data):
                cl = []
                for item in data.entries:
                    ci = self._parseItem(item)
                    if ci is not None:
                        cl.append(ci)
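
                    # brief pause between items (throttles feed processing)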
                    time.sleep(.2)

                if cl:
                    myDB = self._getDB()
                    myDB.mass_action(cl)
            else:
                raise AuthException(
                    u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")

        return []

    def getRSSFeed(self, url, post_data=None, request_headers=None):
        return RSSFeeds(self.providerID).getFeed(url, post_data, request_headers)

    def _translateTitle(self, title):
        return title.replace(' ', '.')
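
    # feed links sometimes arrive with XML-escaped ampersands; unescape them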
    def _translateLinkURL(self, url):
        return url.replace('&amp;', '&')

    def _parseItem(self, item):
        title = item.title
        url = item.link

        self._checkItemAuth(title, url)

        if title and url:
            title = self._translateTitle(title)
            url = self._translateLinkURL(url)

            logger.log(u"Checking if item from RSS feed is in the cache: " + title, logger.DEBUG)
            return self._addCacheEntry(title, url)

        else:
            logger.log(
                u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
                logger.DEBUG)
            return None

    def _getLastUpdate(self):
        myDB = self._getDB()
        sqlResults = myDB.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID])

        if sqlResults:
            lastTime = int(sqlResults[0]["time"])
            if lastTime > int(time.mktime(datetime.datetime.today().timetuple())):
                lastTime = 0
        else:
            lastTime = 0

        return datetime.datetime.fromtimestamp(lastTime)

    def _getLastSearch(self):
        myDB = self._getDB()
        sqlResults = myDB.select("SELECT time FROM lastSearch WHERE provider = ?", [self.providerID])

        if sqlResults:
            lastTime = int(sqlResults[0]["time"])
            if lastTime > int(time.mktime(datetime.datetime.today().timetuple())):
                lastTime = 0
        else:
            lastTime = 0

        return datetime.datetime.fromtimestamp(lastTime)

    def setLastUpdate(self, toDate=None):
        if not toDate:
            toDate = datetime.datetime.today()

        myDB = self._getDB()
        myDB.upsert("lastUpdate",
                    {'time': int(time.mktime(toDate.timetuple()))},
                    {'provider': self.providerID})

    def setLastSearch(self, toDate=None):
        if not toDate:
            toDate = datetime.datetime.today()

        myDB = self._getDB()
        myDB.upsert("lastSearch",
                    {'time': int(time.mktime(toDate.timetuple()))},
                    {'provider': self.providerID})

    lastUpdate = property(_getLastUpdate)
    lastSearch = property(_getLastSearch)

    def shouldUpdate(self):
        # if we've updated recently then skip the update
        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
            logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
                datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
            return False

        return True

    def shouldClearCache(self):
        # if daily search hasn't used our previous results yet then don't clear the cache
        if self.lastUpdate > self.lastSearch:
            logger.log(
                u"Daily search has not yet used our last cache results, not clearing cache ...", logger.DEBUG)
            return False

        return True

    def _addCacheEntry(self, name, url, quality=None):
        try:
            myParser = NameParser(convert=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return None

        if not parse_result or not parse_result.series_name:
            return None

        if not parse_result.show:
            logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
            return None

        season = episodes = None
        if parse_result.air_by_date or parse_result.sports:
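            # air-by-date and sports releases carry a date instead of SxxEyy,
            # so look the date up in tv_episodes to recover season and episode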
            airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_event_date.toordinal()

            myDB = db.DBConnection()
            sql_results = myDB.select(
                "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
                [parse_result.show.indexerid, parse_result.show.indexer, airdate])
            if sql_results:
                season = int(sql_results[0]["season"])
                episodes = [int(sql_results[0]["episode"])]
        else:
            season = parse_result.season_number if parse_result.season_number is not None else 1
            episodes = parse_result.episode_numbers

        if season and episodes:
            # store the episode numbers as a pipe-separated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            if quality is None:
                quality = Quality.sceneQuality(name, parse_result.is_anime)

            if not isinstance(name, unicode):
                name = unicode(name, 'utf-8')

            logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

            return [
                "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
                [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality]]

    def searchCache(self, episodes, manualSearch=False):
        neededEps = self.findNeededEpisodes(episodes, manualSearch)
        return neededEps

    def listPropers(self, date=None, delimiter="."):
        myDB = self._getDB()
        sql = "SELECT * FROM [" + self.providerID + "] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'"

        if date is not None:
            sql += " AND time >= " + str(int(time.mktime(date.timetuple())))

        return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))

    def findNeededEpisodes(self, episodes, manualSearch=False):
        neededEps = {}

        for epObj in episodes:
            myDB = self._getDB()
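            # episode lists are stored pipe-delimited ("|1|2|"), so LIKE "%|n|%"
            # matches any cache entry that contains this episode number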
            sqlResults = myDB.select(
                "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
                [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"])

            # for each cache entry
            for curResult in sqlResults:

                # skip releases that don't look like TV (Newzbin is exempt since we assume its feed is already well filtered)
                if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
                    continue

                # get the show object, or if it's not one of our shows then ignore it
                try:
                    showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
                except MultipleShowObjectsException:
                    showObj = None

                if not showObj:
                    continue

                # get season and ep data (ignoring multi-eps for now)
                curSeason = int(curResult["season"])
                if curSeason == -1:
                    continue
                curEp = curResult["episodes"].split("|")[1]
                if not curEp:
                    continue
                curEp = int(curEp)
                curQuality = int(curResult["quality"])

                # if the show says we want that episode then add it to the list
                if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
                    logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
                               Quality.qualityStrings[curQuality], logger.DEBUG)
                else:
                    if not epObj:
                        epObj = showObj.getEpisode(curSeason, curEp)

                    # build a result object
                    title = curResult["name"]
                    url = curResult["url"]

                    logger.log(u"Found result " + title + " at " + url)

                    result = self.provider.getResult([epObj])
                    result.url = url
                    result.name = title
                    result.quality = curQuality
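                    # for torrent providers, fetch the actual .torrent file now,
                    # unless the url is a magnet link (nothing to download yet)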
                    result.content = self.provider.getURL(url) \
                        if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
                        and not url.startswith('magnet') else None

                    # add it to the list
                    if epObj not in neededEps:
                        neededEps[epObj] = [result]
                    else:
                        neededEps[epObj].append(result)

        # datetime stamp this search so cache gets cleared
        self.setLastSearch()

        return neededEps