diff --git a/CHANGES.md b/CHANGES.md index df558de5..715b4344 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -14,6 +14,7 @@ * Add logging around the restart/shutdown event * Update package resource API 63.2.0 (3ae44cd) to 67.3.2 (b9bf2ec) +* Refactor `timestamp_near` to `SGDatetime.timestamp_near` ### 3.27.11 (2023-03-06 23:40:00 UTC) diff --git a/sickgear/clients/download_station.py b/sickgear/clients/download_station.py index 2752ab48..42111909 100644 --- a/sickgear/clients/download_station.py +++ b/sickgear/clients/download_station.py @@ -17,13 +17,12 @@ # Uses the Synology Download Station API: # http://download.synology.com/download/Document/DeveloperGuide/Synology_Download_Station_Web_API.pdf -from datetime import datetime import re import time from .generic import GenericClient from .. import logger -from ..sgdatetime import timestamp_near +from ..sgdatetime import SGDatetime import sickgear from _23 import unquote_plus @@ -298,7 +297,7 @@ class DownloadStationAPI(GenericClient): if 1 < self._task_version and sickgear.TORRENT_PATH: params['destination'] = re.sub(r'^/(volume\d*/)?', '', sickgear.TORRENT_PATH) - task_stamp = int(timestamp_near(datetime.now())) + task_stamp = SGDatetime.timestamp_near() response = self._client_request('create', t_params=params, files=files) # noinspection PyUnresolvedReferences if response and response.get('success'): diff --git a/sickgear/clients/qbittorrent.py b/sickgear/clients/qbittorrent.py index 6f571128..36737f59 100644 --- a/sickgear/clients/qbittorrent.py +++ b/sickgear/clients/qbittorrent.py @@ -14,14 +14,13 @@ # You should have received a copy of the GNU General Public License # along with SickGear. If not, see . -from datetime import datetime import re import time from .generic import GenericClient from .. 
import logger from ..helpers import get_url, try_int -from ..sgdatetime import timestamp_near +from ..sgdatetime import SGDatetime import sickgear from requests.exceptions import HTTPError @@ -372,7 +371,7 @@ class QbittorrentAPI(GenericClient): else: kwargs = dict(post_data=params, files={'torrents': ('%s.torrent' % data.name, data.content)}) - task_stamp = int(timestamp_near(datetime.now())) + task_stamp = SGDatetime.timestamp_near() response = self._client_request(('torrents/add', 'command/%s' % cmd)[not self.api_ns], **kwargs) if True is response: diff --git a/sickgear/db.py b/sickgear/db.py index c1efaed1..b20485fa 100644 --- a/sickgear/db.py +++ b/sickgear/db.py @@ -28,7 +28,7 @@ from exceptions_helper import ex import sickgear from . import logger, sgdatetime -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from sg_helpers import make_path, compress_file, remove_file_perm, scantree @@ -841,7 +841,7 @@ def backup_all_dbs(target, compress=True, prefer_7z=True): my_db = DBConnection('cache.db') last_backup = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', ['sickgear_db_backup']) if last_backup: - now_stamp = int(timestamp_near(datetime.datetime.now())) + now_stamp = SGDatetime.timestamp_near() the_time = int(last_backup[0]['time']) # only backup every 23 hours if now_stamp - the_time < 60 * 60 * 23: diff --git a/sickgear/helpers.py b/sickgear/helpers.py index 4cb670a3..4c78e787 100644 --- a/sickgear/helpers.py +++ b/sickgear/helpers.py @@ -34,7 +34,7 @@ import sickgear from . 
import db, logger, notifiers from .common import cpu_presets, mediaExtensions, Overview, Quality, statusStrings, subtitleExtensions, \ ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED_ANY, SUBTITLED, UNAIRED, UNKNOWN, WANTED -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from lib.tvinfo_base.exceptions import * from exceptions_helper import ex, MultipleShowObjectsException @@ -1031,7 +1031,7 @@ def clear_cache(force=False): """ # clean out cache directory, remove everything > 12 hours old dirty = None - del_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(hours=12)))) + del_time = SGDatetime.timestamp_near(td=datetime.timedelta(hours=12)) direntry_args = dict(follow_symlinks=False) for direntry in scantree(sickgear.CACHE_DIR, ['images|rss|zoneinfo'], follow_symlinks=True): if direntry.is_file(**direntry_args) and (force or del_time > direntry.stat(**direntry_args).st_mtime): @@ -1342,7 +1342,7 @@ def delete_not_changed_in(paths, days=30, minutes=0): :param minutes: Purge files not modified in this number of minutes (default: 0 minutes) :return: tuple; number of files that qualify for deletion, number of qualifying files that failed to be deleted """ - del_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=days, minutes=minutes)))) + del_time = SGDatetime.timestamp_near(td=datetime.timedelta(days=days, minutes=minutes)) errors = 0 qualified = 0 for cur_path in (paths, [paths])[not isinstance(paths, list)]: @@ -1367,7 +1367,7 @@ def set_file_timestamp(filename, min_age=3, new_time=None): :param new_time: :type new_time: None or int """ - min_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=min_age)))) + min_time = SGDatetime.timestamp_near(td=datetime.timedelta(days=min_age)) try: if os.path.isfile(filename) and os.path.getmtime(filename) < min_time: os.utime(filename, new_time) diff --git a/sickgear/image_cache.py b/sickgear/image_cache.py index 
10fe08da..09f8910c 100644 --- a/sickgear/image_cache.py +++ b/sickgear/image_cache.py @@ -26,7 +26,7 @@ import sickgear import sg_helpers from . import db, logger from .metadata.generic import GenericMetadata -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from .indexers.indexer_config import TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TMDB, TVINFO_IMDB from six import itervalues, iteritems @@ -465,7 +465,7 @@ class ImageCache(object): minutes_iv = 60 * 3 # daily_interval = 60 * 60 * 23 iv = minutes_iv - now_stamp = int(timestamp_near(datetime.datetime.now())) + now_stamp = SGDatetime.timestamp_near() the_time = int(sql_result[0]['time']) return now_stamp - the_time > iv @@ -482,7 +482,7 @@ class ImageCache(object): """ my_db = db.DBConnection('cache.db') my_db.upsert('lastUpdate', - {'time': int(timestamp_near(datetime.datetime.now()))}, + {'time': SGDatetime.timestamp_near()}, {'provider': 'imsg_%s_%s' % ((image_type, self.FANART)[None is image_type], provider)}) def _cache_image_from_file(self, image_path, img_type, tvid, prodid, prefix='', move_file=False): diff --git a/sickgear/logger.py b/sickgear/logger.py index 702fdb8f..1ecbca02 100644 --- a/sickgear/logger.py +++ b/sickgear/logger.py @@ -31,7 +31,7 @@ from logging.handlers import TimedRotatingFileHandler import sickgear from . 
import classes -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from sg_helpers import md5_for_text, remove_file_perm # noinspection PyUnreachableCode @@ -198,7 +198,7 @@ class SBRotatingLogHandler(object): mem_key = 'logger' for to_log in log_list: log_id = md5_for_text(to_log) - now = int(timestamp_near(datetime.datetime.now())) + now = SGDatetime.timestamp_near() expired = now > sickgear.MEMCACHE.get(mem_key, {}).get(log_id, 0) sickgear.MEMCACHE[mem_key] = {} sickgear.MEMCACHE[mem_key][log_id] = 2 + now diff --git a/sickgear/processTV.py b/sickgear/processTV.py index f6a0a5bb..16326af2 100644 --- a/sickgear/processTV.py +++ b/sickgear/processTV.py @@ -33,7 +33,7 @@ from . import common, db, failedProcessor, helpers, logger, notifiers, postProce from .common import SNATCHED_ANY from .history import reset_status from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from six import iteritems, iterkeys, string_types, text_type from sg_helpers import long_path, scantree @@ -571,7 +571,7 @@ class ProcessTVShow(object): archives = [os.path.basename(x) for x in unused_files] if unused_files: for f in unused_files: - archive_history.setdefault(f, int(timestamp_near(datetime.datetime.utcnow()))) + archive_history.setdefault(f, SGDatetime.timestamp_near(datetime.datetime.utcnow())) if init_history_cnt != len(archive_history): try: diff --git a/sickgear/properFinder.py b/sickgear/properFinder.py index 12a07326..1397e06a 100644 --- a/sickgear/properFinder.py +++ b/sickgear/properFinder.py @@ -30,7 +30,7 @@ from .common import ARCHIVED, FAILED, DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, NeededQualities, Quality from .history import dateFormat from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from _23 import map_consume from six 
import string_types @@ -689,10 +689,10 @@ def _set_last_proper_search(when): if 0 == len(sql_result): my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)', - [0, 0, int(timestamp_near(when))]) + [0, 0, SGDatetime.timestamp_near(when)]) else: # noinspection SqlConstantCondition - my_db.action('UPDATE info SET last_proper_search=%s WHERE 1=1' % int(timestamp_near(when))) + my_db.action('UPDATE info SET last_proper_search=%s WHERE 1=1' % SGDatetime.timestamp_near(when)) def next_proper_timeleft(): diff --git a/sickgear/providers/generic.py b/sickgear/providers/generic.py index 17c00f20..dab1fc5c 100644 --- a/sickgear/providers/generic.py +++ b/sickgear/providers/generic.py @@ -39,7 +39,7 @@ from ..helpers import maybe_plural, remove_file_perm from ..name_parser.parser import InvalidNameException, InvalidShowException, NameParser from ..scene_exceptions import has_season_exceptions from ..show_name_helpers import get_show_names_all_possible -from ..sgdatetime import SGDatetime, timestamp_near +from ..sgdatetime import SGDatetime from ..tv import TVEpisode, TVShow from cfscrape import CloudflareScraper @@ -112,7 +112,7 @@ class ProviderFailList(object): date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour)) if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}): if isinstance(e.fail_time, datetime.datetime): - value = timestamp_near(e.fail_time) + value = SGDatetime.timestamp_near(e.fail_time, return_int=False) else: value = SGDatetime.timestamp_far(e.fail_time) default = {'date': str(fail_date), 'date_time': date_time, @@ -178,7 +178,7 @@ class ProviderFailList(object): cl = [] for f in self._fails: if isinstance(f.fail_time, datetime.datetime): - value = int(timestamp_near(f.fail_time)) + value = SGDatetime.timestamp_near(f.fail_time) else: value = SGDatetime.timestamp_far(f.fail_time) cl.append(['INSERT OR IGNORE INTO provider_fails (prov_name, fail_type, fail_code, fail_time) ' 
@@ -211,7 +211,7 @@ class ProviderFailList(object): my_db = db.DBConnection('cache.db') if my_db.has_table('provider_fails'): # noinspection PyCallByClass,PyTypeChecker - time_limit = int(timestamp_near(datetime.datetime.now() - datetime.timedelta(days=28))) + time_limit = SGDatetime.timestamp_near(td=datetime.timedelta(days=28)) my_db.action('DELETE FROM provider_fails WHERE fail_time < ?', [time_limit]) except (BaseException, Exception): pass @@ -340,7 +340,7 @@ class GenericProvider(object): self._failure_time = value if changed_val: if isinstance(value, datetime.datetime): - value = int(timestamp_near(value)) + value = SGDatetime.timestamp_near(value) elif value: # noinspection PyCallByClass value = SGDatetime.timestamp_far(value) @@ -370,7 +370,7 @@ class GenericProvider(object): self._tmr_limit_time = value if changed_val: if isinstance(value, datetime.datetime): - value = int(timestamp_near(value)) + value = SGDatetime.timestamp_near(value) elif value: # noinspection PyCallByClass value = SGDatetime.timestamp_far(value) diff --git a/sickgear/providers/newznab.py b/sickgear/providers/newznab.py index fb7cc782..b9cac5e2 100644 --- a/sickgear/providers/newznab.py +++ b/sickgear/providers/newznab.py @@ -31,7 +31,7 @@ from ..common import NeededQualities, Quality from ..helpers import remove_non_release_groups from ..indexers.indexer_config import * from ..network_timezones import SG_TIMEZONE -from ..sgdatetime import SGDatetime, timestamp_near +from ..sgdatetime import SGDatetime from ..search import get_aired_in_season, get_wanted_qualities from ..show_name_helpers import get_show_names from ..scene_exceptions import has_season_exceptions @@ -217,7 +217,7 @@ class NewznabProvider(generic.NZBProvider): try: my_db = db.DBConnection('cache.db') if isinstance(value, datetime.datetime): - save_value = int(timestamp_near(value)) + save_value = SGDatetime.timestamp_near(value) else: save_value = SGDatetime.timestamp_far(value, default=0) my_db.action('INSERT OR 
REPLACE INTO "lastrecentsearch" (name, datetime) VALUES (?,?)', diff --git a/sickgear/scene_exceptions.py b/sickgear/scene_exceptions.py index 3e4064fe..148285cc 100644 --- a/sickgear/scene_exceptions.py +++ b/sickgear/scene_exceptions.py @@ -31,7 +31,7 @@ from . import db, helpers, logger, name_cache from .anime import create_anidb_obj from .classes import OrderedDefaultdict from .indexers.indexer_config import TVINFO_TVDB -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime import lib.rarfile.rarfile as rarfile @@ -68,9 +68,9 @@ def should_refresh(name, max_refresh_age_secs=86400, remaining=False): if rows: last_refresh = int(rows[0]['last_refreshed']) if remaining: - time_left = (last_refresh + max_refresh_age_secs - int(timestamp_near(datetime.datetime.now()))) + time_left = (last_refresh + max_refresh_age_secs - SGDatetime.timestamp_near()) return (0, time_left)[time_left > 0] - return int(timestamp_near(datetime.datetime.now())) > last_refresh + max_refresh_age_secs + return SGDatetime.timestamp_near() > last_refresh + max_refresh_age_secs return True @@ -82,7 +82,7 @@ def set_last_refresh(name): """ my_db = db.DBConnection() my_db.upsert('scene_exceptions_refresh', - {'last_refreshed': int(timestamp_near(datetime.datetime.now()))}, + {'last_refreshed': SGDatetime.timestamp_near()}, {'list': name}) diff --git a/sickgear/scene_numbering.py b/sickgear/scene_numbering.py index 885fc527..2afc2914 100644 --- a/sickgear/scene_numbering.py +++ b/sickgear/scene_numbering.py @@ -30,7 +30,7 @@ import sickgear from . 
import db, logger from .helpers import try_int from .scene_exceptions import xem_ids_list -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime # noinspection PyUnreachableCode if False: @@ -794,7 +794,7 @@ def xem_refresh(tvid, prodid, force=False): """, [tvid, prodid]) if sql_result: last_refresh = int(sql_result[0]['last_refreshed']) - refresh = int(timestamp_near(datetime.datetime.now())) > last_refresh + max_refresh_age_secs + refresh = SGDatetime.timestamp_near() > last_refresh + max_refresh_age_secs else: refresh = True @@ -803,7 +803,7 @@ def xem_refresh(tvid, prodid, force=False): # mark refreshed my_db.upsert('xem_refresh', - dict(last_refreshed=int(timestamp_near(datetime.datetime.now()))), + dict(last_refreshed=SGDatetime.timestamp_near()), dict(indexer=tvid, indexer_id=prodid)) try: diff --git a/sickgear/search_backlog.py b/sickgear/search_backlog.py index e2936f52..ecd9b369 100644 --- a/sickgear/search_backlog.py +++ b/sickgear/search_backlog.py @@ -25,7 +25,7 @@ from . 
import db, logger, scheduler, search_queue, ui from .helpers import find_show_by_id from .providers.generic import GenericProvider from .search import wanted_episodes -from .sgdatetime import SGDatetime, timestamp_near +from .sgdatetime import SGDatetime from .tv import TVidProdid, TVEpisode, TVShow from six import iteritems, itervalues, moves @@ -344,7 +344,7 @@ class BacklogSearcher(object): last_run_time = 1 else: last_run_time = int(sql_result[0]['last_run_backlog']) - if last_run_time > int(timestamp_near(datetime.datetime.now())): + if last_run_time > SGDatetime.timestamp_near(): last_run_time = 1 return last_run_time @@ -356,7 +356,7 @@ class BacklogSearcher(object): sql_result = my_db.select('SELECT * FROM info') if isinstance(when, datetime.datetime): - when = int(timestamp_near(when)) + when = SGDatetime.timestamp_near(when) else: when = SGDatetime.timestamp_far(when, default=0) if 0 == len(sql_result): diff --git a/sickgear/sgdatetime.py b/sickgear/sgdatetime.py index 1e6ffaf0..5beada3a 100644 --- a/sickgear/sgdatetime.py +++ b/sickgear/sgdatetime.py @@ -282,16 +282,25 @@ class SGDatetime(datetime.datetime): finally: return (default, timestamp)[isinstance(timestamp, (float, integer_types))] + @static_or_instance + def timestamp_near(self, + dt=None, # type: Union[SGDatetime, datetime.datetime, None] + td=None, # type: Optional[datetime.timedelta] + return_int=True # type: bool + ): + # type: (...)
-> Union[float, integer_types] + """ + Use `timestamp_near` for a timestamp in the near future or near past -# noinspection PyUnreachableCode -if False: - # just to trick pycharm in correct type detection - # noinspection PyUnusedLocal - def timestamp_near(d_t): - # type: (datetime.datetime) -> float - pass + Raises exception if dt cannot be converted to int - -# py3 native timestamp uses milliseconds -# noinspection PyRedeclaration -timestamp_near = datetime.datetime.timestamp + td is timedelta to subtract from datetime + """ + obj = (dt, self)[self is not None] # type: datetime.datetime + if None is obj: + obj = datetime.datetime.now() + if isinstance(td, datetime.timedelta): + obj -= td + if not return_int: + return datetime.datetime.timestamp(obj) + return int(datetime.datetime.timestamp(obj)) diff --git a/sickgear/tv.py b/sickgear/tv.py index b9e6d5db..8073d907 100644 --- a/sickgear/tv.py +++ b/sickgear/tv.py @@ -52,7 +52,7 @@ from .helpers import try_float, try_int from .indexermapper import del_mapping, MapStatus, save_mapping from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TVRAGE from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser -from .sgdatetime import SGDatetime, timestamp_near +from .sgdatetime import SGDatetime from .tv_base import TVEpisodeBase, TVShowBase from lib import imdbpie, subliminal @@ -1530,7 +1530,7 @@ class TVShow(TVShowBase): self._last_found_on_indexer = self.last_found_on_indexer my_db = db.DBConnection() # noinspection PyUnresolvedReferences - last_check = int(timestamp_near(datetime.datetime.now())) + last_check = SGDatetime.timestamp_near() # in case of flag change (+/-) don't change last_check date if abs(v) == abs(self._not_found_count): sql_result = my_db.select( diff --git a/sickgear/tvcache.py b/sickgear/tvcache.py index 3bab265e..d7fbd365 100644 --- a/sickgear/tvcache.py +++ b/sickgear/tvcache.py @@ -27,7 +27,7 @@ from .classes 
import SearchResult from .common import Quality from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser, ParseResult from .rssfeeds import RSSFeeds -from .sgdatetime import timestamp_near +from .sgdatetime import SGDatetime from .tv import TVEpisode # noinspection PyUnreachableCode @@ -172,7 +172,7 @@ class TVCache(object): if sql_result: last_time = int(sql_result[0]['time']) - if last_time > int(timestamp_near(datetime.datetime.now())): + if last_time > SGDatetime.timestamp_near(): last_time = 0 else: last_time = 0 @@ -190,7 +190,7 @@ class TVCache(object): if sql_result: last_time = int(sql_result[0]['time']) - if last_time > int(timestamp_near(datetime.datetime.now())): + if last_time > SGDatetime.timestamp_near(): last_time = 0 else: last_time = 0 @@ -300,7 +300,7 @@ class TVCache(object): episode_text = '|%s|' % '|'.join(map(str, episode_numbers)) # get the current timestamp - cur_timestamp = int(timestamp_near(datetime.datetime.now())) + cur_timestamp = SGDatetime.timestamp_near() # get quality of release quality = parse_result.quality diff --git a/sickgear/webserve.py b/sickgear/webserve.py index 1a97fe8e..82cf7697 100644 --- a/sickgear/webserve.py +++ b/sickgear/webserve.py @@ -66,7 +66,7 @@ from .providers import newznab, rsstorrent from .scene_numbering import get_scene_absolute_numbering_for_show, get_scene_numbering_for_show, \ get_xem_absolute_numbering_for_show, get_xem_numbering_for_show, set_scene_numbering_helper from .search_backlog import FORCED_BACKLOG -from .sgdatetime import SGDatetime, timestamp_near +from .sgdatetime import SGDatetime from .show_name_helpers import abbr_showname from .show_updater import clean_ignore_require_words @@ -674,7 +674,7 @@ class RepoHandler(BaseStaticFileHandler): def get_watchedstate_updater_addon_xml(self): mem_key = 'kodi_xml' - if int(timestamp_near(datetime.datetime.now())) < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): + if SGDatetime.timestamp_near() < 
sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): return sickgear.MEMCACHE.get(mem_key).get('data') filename = 'addon%s.xml' % self.kodi_include @@ -682,7 +682,7 @@ class RepoHandler(BaseStaticFileHandler): 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh: xml = fh.read().strip() % dict(ADDON_VERSION=self.get_addon_version(self.kodi_include)) - sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=xml) + sickgear.MEMCACHE[mem_key] = dict(last_update=30 + SGDatetime.timestamp_near(), data=xml) return xml @staticmethod @@ -696,7 +696,7 @@ class RepoHandler(BaseStaticFileHandler): Must use an arg here instead of `self` due to static call use case from external class """ mem_key = 'kodi_ver' - if int(timestamp_near(datetime.datetime.now())) < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): + if SGDatetime.timestamp_near() < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0): return sickgear.MEMCACHE.get(mem_key).get('data') filename = 'service%s.py' % kodi_include @@ -704,7 +704,7 @@ class RepoHandler(BaseStaticFileHandler): 'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh: version = re.findall(r'ADDON_VERSION\s*?=\s*?\'([^\']+)', fh.read())[0] - sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=version) + sickgear.MEMCACHE[mem_key] = dict(last_update=30 + SGDatetime.timestamp_near(), data=version) return version def render_kodi_repo_addon_xml(self): @@ -1465,7 +1465,7 @@ r.close() continue if bname in ep_results: - date_watched = now = int(timestamp_near(datetime.datetime.now())) + date_watched = now = SGDatetime.timestamp_near() if 1500000000 < date_watched: date_watched = helpers.try_int(float(v.get('date_watched'))) @@ -9589,8 +9589,8 @@ class CachedImages(MainHandler): dummy_file = '%s.%s.dummy' % (os.path.splitext(filename)[0], source) if os.path.isfile(dummy_file): if 
os.stat(dummy_file).st_mtime \ - < (int(timestamp_near((datetime.datetime.now() - - datetime.timedelta(days=days, minutes=minutes))))): + < (SGDatetime.timestamp_near(datetime.datetime.now() + - datetime.timedelta(days=days, minutes=minutes))): CachedImages.delete_dummy_image(dummy_file) else: result = False @@ -9695,7 +9695,7 @@ class CachedImages(MainHandler): """ if not os.path.isfile(filename) or \ os.stat(filename).st_mtime < \ - (int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=days))))): + SGDatetime.timestamp_near(td=datetime.timedelta(days=days)): return True return False