# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.
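
"""Proper finder for SickGear.

Searches the active providers for proper releases of episodes that were recently
snatched or downloaded, verifies each candidate against what is already in the
database, and snatches those that qualify.
"""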

import datetime
import operator
import os
import re
import threading
import traceback

import sickbeard

from sickbeard import db, exceptions, helpers, history, logger, search, show_name_helpers
from sickbeard import encodingKludge as ek
from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality, ARCHIVED, SNATCHED_BEST
from sickbeard.exceptions import ex

from name_parser.parser import NameParser, InvalidNameException, InvalidShowException


def search_propers():
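    """Search active providers for newly released propers and snatch any that qualify.

    Does nothing when DOWNLOAD_PROPERS is disabled. Records the time of this run in the
    db and logs an estimate of when the scheduler will make the next check.
    """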
    if not sickbeard.DOWNLOAD_PROPERS:
        return

    logger.log(u'Beginning search for new propers')

    age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
    aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
    aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)
    recent_shows, recent_anime = _recent_history(aired_since_shows, aired_since_anime)
    if recent_shows or recent_anime:
        propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime)

        if propers:
            _download_propers(propers)
    else:
        logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' %
                   (age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()]))

    _set_last_proper_search(datetime.datetime.today().toordinal())

    run_at = ''
    proper_sch = sickbeard.properFinderScheduler
    if None is proper_sch.start_time:
        run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
        run_at = u', next check '
        if datetime.timedelta() > run_in:
            run_at += u'imminent'
        else:
            hours, remainder = divmod(run_in.seconds, 3600)
            minutes, seconds = divmod(remainder, 60)
            run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else '%dm, %ds' % (minutes, seconds))

    logger.log(u'Completed the search for new propers%s' % run_at)


def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
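    """Search every active provider for proper releases and return the verified candidates.

    Results are de-duplicated by a normalised name, parsed with NameParser, then filtered
    by word lists, existing episode status/quality, release group and (for anime) version.
    """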
    propers = {}

    # for each provider get a list of the propers
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    np = NameParser(False, try_scene_exceptions=True)
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')

        try:
            found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                      anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.ERROR)
            continue
        finally:
            threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the proper to the list
        count = 0
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    np = NameParser(False, try_scene_exceptions=True, showObj=x.parsed_show)
                    parse_result = np.parse(x.name)
                    if parse_result.series_name and parse_result.episode_numbers and \
                            parse_result.show.indexerid in recent_shows + recent_anime:
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        propers[name] = x
                        count += 1
                except (InvalidNameException, InvalidShowException):
                    continue
                except (StandardError, Exception):
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and sort it by date, newest first
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    verified_propers = []

    for cur_proper in sorted_propers:

        parse_result = np.parse(cur_proper.name)

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False):
            logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select(
            'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED, SNATCHED, SNATCHED_BEST, ARCHIVED) \
                or cur_proper.quality != old_quality:
            continue

        old_release_group = sql_results[0]['release_group']
        log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
                       % (cur_proper.release_group, old_release_group, cur_proper.name)

        # for web-dls, prevent propers from different groups
        if sickbeard.PROPERS_WEBDL_ONEGRP and \
                (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                 (old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I))) and \
                cur_proper.release_group != old_release_group:
            logger.log(log_same_grp, logger.DEBUG)
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:

            old_version = int(sql_results[0]['version'])
            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1 and (cur_proper.indexerid, cur_proper.season, cur_proper.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), verified_propers):
            logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
            verified_propers.append(cur_proper)

    return verified_propers


def _download_propers(proper_list):
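    """Snatch each verified proper that matches an episode snatched or downloaded recently.

    A proper is skipped when no matching history entry exists within the last 30 days,
    when its show can't be found, or when the same release is already in history.
    """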
    for cur_proper in proper_list:

        history_limit = datetime.datetime.today() - datetime.timedelta(days=30)

        # make sure the episode has been downloaded before
        my_db = db.DBConnection()
        history_results = my_db.select(
            'SELECT resource FROM history ' +
            'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? ' +
            'AND (' + ' OR '.join("action LIKE '%%%02d'" % x for x in (SNATCHED, DOWNLOADED, SNATCHED_PROPER,
                                                                       SNATCHED_BEST, ARCHIVED)) + ')',
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
             history_limit.strftime(history.dateFormat)])

        # if we didn't download this episode in the first place we don't know what quality to use for the proper = skip
        if 0 == len(history_results):
            logger.log(u'Skipping download because cannot find an original history entry for proper ' + cur_proper.name)
            continue

        else:

            # get the show object
            show_obj = helpers.findCertainShow(sickbeard.showList, cur_proper.indexerid)
            if None is show_obj:
                logger.log(u'Unable to find the show with indexerid ' + str(
                    cur_proper.indexerid) + ' so unable to download the proper', logger.ERROR)
                continue

            # make sure that none of the existing history downloads are the same proper we're trying to download
            clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime))
            is_same = False
            for result in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
                        ek.ek(os.path.basename, result['resource']))):
                    is_same = True
                    break
            if is_same:
                logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
                continue

            ep_obj = show_obj.getEpisode(cur_proper.season, cur_proper.episode)

            # make the result object
            result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
            if None is result:
                continue
            result.name = cur_proper.name
            result.quality = cur_proper.quality
            result.version = cur_proper.version

            # snatch it
            search.snatch_episode(result, SNATCHED_PROPER)


def _recent_history(aired_since_shows, aired_since_anime):
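    """Return ids of shows that have episodes downloaded or snatched since the given dates.

    Returns a tuple (recent_shows, recent_anime) of show id lists, with regular shows
    matched against `aired_since_shows` and anime shows collected separately.
    """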
    recent_shows, recent_anime = [], []

    aired_since_shows = aired_since_shows.toordinal()
    aired_since_anime = aired_since_anime.toordinal()

    my_db = db.DBConnection()
    sql_results = my_db.select(
        'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
        ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
        ' WHERE e.airdate >= %s' % min(aired_since_shows, aired_since_anime) +
        ' AND (e.status IN (%s))' % ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED])
    )

    for sqlshow in sql_results:
        show = helpers.findCertainShow(sickbeard.showList, sqlshow['showid'])
        if show:
            if sqlshow['airdate'] >= aired_since_shows and not show.is_anime:
                sqlshow['showid'] not in recent_shows and recent_shows.append(sqlshow['showid'])
            else:
                sqlshow['showid'] not in recent_anime and show.is_anime and recent_anime.append(sqlshow['showid'])

    return recent_shows, recent_anime


def _generic_name(name):
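    """Return a lowercased name with dots, dashes and underscores replaced by spaces.

    For example, 'Show.Name.S01E01.PROPER-GROUP' becomes 'show name s01e01 proper group'.
    """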
    return name.replace('.', ' ').replace('-', ' ').replace('_', ' ').lower()


def _set_last_proper_search(when):
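    """Store the ordinal date of the last proper search in the info table."""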
    logger.log(u'Setting the last Proper search in the DB to %s' % when, logger.DEBUG)

    my_db = db.DBConnection()
    sql_results = my_db.select('SELECT * FROM info')

    if 0 == len(sql_results):
        my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
                     [0, 0, str(when)])
    else:
        my_db.action('UPDATE info SET last_proper_search=%s' % when)


def _get_last_proper_search():
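    """Return the date of the last proper search from the info table, or day one when unknown."""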
    my_db = db.DBConnection()
    sql_results = my_db.select('SELECT * FROM info')

    try:
        last_proper_search = datetime.date.fromordinal(int(sql_results[0]['last_proper_search']))
    except (StandardError, Exception):
        return datetime.date.fromordinal(1)

    return last_proper_search