mirror of https://github.com/SickGear/SickGear.git
synced 2024-11-23 21:35:05 +00:00

Refactor timestamp_near to SGDatetime.timestamp_near.

parent 59de9f8bda, commit 19f0a951e5
18 changed files with 74 additions and 66 deletions

@@ -14,6 +14,7 @@
 
 * Add logging around the restart/shutdown event
 * Update package resource API 63.2.0 (3ae44cd) to 67.3.2 (b9bf2ec)
+* Refactor `timestamp_near` to `SGDatetime.timestamp_near`
 
 
 ### 3.27.11 (2023-03-06 23:40:00 UTC)

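The same mechanical substitution repeats across the file diffs below. As a representative before/after sketch (illustrative only: each module imports from its own relative path, as the individual hunks show):

import datetime

# before: module-level helper; callers wrapped it in int() and passed "now" themselves
from sickgear.sgdatetime import timestamp_near
task_stamp = int(timestamp_near(datetime.datetime.now()))

# after: method on SGDatetime; "now", the int conversion and timedelta handling are built in
from sickgear.sgdatetime import SGDatetime
task_stamp = SGDatetime.timestamp_near()
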
@@ -17,13 +17,12 @@
 # Uses the Synology Download Station API:
 # http://download.synology.com/download/Document/DeveloperGuide/Synology_Download_Station_Web_API.pdf
 
-from datetime import datetime
 import re
 import time
 
 from .generic import GenericClient
 from .. import logger
-from ..sgdatetime import timestamp_near
+from ..sgdatetime import SGDatetime
 import sickgear
 
 from _23 import unquote_plus

@@ -298,7 +297,7 @@ class DownloadStationAPI(GenericClient):
         if 1 < self._task_version and sickgear.TORRENT_PATH:
             params['destination'] = re.sub(r'^/(volume\d*/)?', '', sickgear.TORRENT_PATH)
 
-        task_stamp = int(timestamp_near(datetime.now()))
+        task_stamp = SGDatetime.timestamp_near()
         response = self._client_request('create', t_params=params, files=files)
         # noinspection PyUnresolvedReferences
         if response and response.get('success'):

@@ -14,14 +14,13 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
-from datetime import datetime
 import re
 import time
 
 from .generic import GenericClient
 from .. import logger
 from ..helpers import get_url, try_int
-from ..sgdatetime import timestamp_near
+from ..sgdatetime import SGDatetime
 import sickgear
 
 from requests.exceptions import HTTPError

@@ -372,7 +371,7 @@ class QbittorrentAPI(GenericClient):
         else:
             kwargs = dict(post_data=params, files={'torrents': ('%s.torrent' % data.name, data.content)})
 
-        task_stamp = int(timestamp_near(datetime.now()))
+        task_stamp = SGDatetime.timestamp_near()
         response = self._client_request(('torrents/add', 'command/%s' % cmd)[not self.api_ns], **kwargs)
 
         if True is response:

@@ -28,7 +28,7 @@ from exceptions_helper import ex
 
 import sickgear
 from . import logger, sgdatetime
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 
 from sg_helpers import make_path, compress_file, remove_file_perm, scantree
 

@@ -841,7 +841,7 @@ def backup_all_dbs(target, compress=True, prefer_7z=True):
     my_db = DBConnection('cache.db')
     last_backup = my_db.select('SELECT time FROM lastUpdate WHERE provider = ?', ['sickgear_db_backup'])
     if last_backup:
-        now_stamp = int(timestamp_near(datetime.datetime.now()))
+        now_stamp = SGDatetime.timestamp_near()
         the_time = int(last_backup[0]['time'])
         # only backup every 23 hours
         if now_stamp - the_time < 60 * 60 * 23:

@@ -34,7 +34,7 @@ import sickgear
 from . import db, logger, notifiers
 from .common import cpu_presets, mediaExtensions, Overview, Quality, statusStrings, subtitleExtensions, \
     ARCHIVED, DOWNLOADED, FAILED, IGNORED, SKIPPED, SNATCHED_ANY, SUBTITLED, UNAIRED, UNKNOWN, WANTED
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 from lib.tvinfo_base.exceptions import *
 from exceptions_helper import ex, MultipleShowObjectsException
 

@@ -1031,7 +1031,7 @@ def clear_cache(force=False):
     """
     # clean out cache directory, remove everything > 12 hours old
     dirty = None
-    del_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(hours=12))))
+    del_time = SGDatetime.timestamp_near(td=datetime.timedelta(hours=12))
     direntry_args = dict(follow_symlinks=False)
     for direntry in scantree(sickgear.CACHE_DIR, ['images|rss|zoneinfo'], follow_symlinks=True):
         if direntry.is_file(**direntry_args) and (force or del_time > direntry.stat(**direntry_args).st_mtime):

@@ -1342,7 +1342,7 @@ def delete_not_changed_in(paths, days=30, minutes=0):
     :param minutes: Purge files not modified in this number of minutes (default: 0 minutes)
     :return: tuple; number of files that qualify for deletion, number of qualifying files that failed to be deleted
     """
-    del_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=days, minutes=minutes))))
+    del_time = SGDatetime.timestamp_near(td=datetime.timedelta(days=days, minutes=minutes))
     errors = 0
     qualified = 0
     for cur_path in (paths, [paths])[not isinstance(paths, list)]:

@@ -1367,7 +1367,7 @@ def set_file_timestamp(filename, min_age=3, new_time=None):
     :param new_time:
     :type new_time: None or int
     """
-    min_time = int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=min_age))))
+    min_time = SGDatetime.timestamp_near(td=datetime.timedelta(days=min_age))
     try:
         if os.path.isfile(filename) and os.path.getmtime(filename) < min_time:
             os.utime(filename, new_time)

@@ -26,7 +26,7 @@ import sickgear
 import sg_helpers
 from . import db, logger
 from .metadata.generic import GenericMetadata
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 from .indexers.indexer_config import TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TMDB, TVINFO_IMDB
 
 from six import itervalues, iteritems

@@ -465,7 +465,7 @@ class ImageCache(object):
         minutes_iv = 60 * 3
         # daily_interval = 60 * 60 * 23
         iv = minutes_iv
-        now_stamp = int(timestamp_near(datetime.datetime.now()))
+        now_stamp = SGDatetime.timestamp_near()
         the_time = int(sql_result[0]['time'])
         return now_stamp - the_time > iv
 

@@ -482,7 +482,7 @@ class ImageCache(object):
         """
         my_db = db.DBConnection('cache.db')
         my_db.upsert('lastUpdate',
-                     {'time': int(timestamp_near(datetime.datetime.now()))},
+                     {'time': SGDatetime.timestamp_near()},
                      {'provider': 'imsg_%s_%s' % ((image_type, self.FANART)[None is image_type], provider)})
 
     def _cache_image_from_file(self, image_path, img_type, tvid, prodid, prefix='', move_file=False):

@@ -31,7 +31,7 @@ from logging.handlers import TimedRotatingFileHandler
 
 import sickgear
 from . import classes
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 from sg_helpers import md5_for_text, remove_file_perm
 
 # noinspection PyUnreachableCode

@@ -198,7 +198,7 @@ class SBRotatingLogHandler(object):
         mem_key = 'logger'
         for to_log in log_list:
             log_id = md5_for_text(to_log)
-            now = int(timestamp_near(datetime.datetime.now()))
+            now = SGDatetime.timestamp_near()
             expired = now > sickgear.MEMCACHE.get(mem_key, {}).get(log_id, 0)
             sickgear.MEMCACHE[mem_key] = {}
             sickgear.MEMCACHE[mem_key][log_id] = 2 + now

@@ -33,7 +33,7 @@ from . import common, db, failedProcessor, helpers, logger, notifiers, postProce
 from .common import SNATCHED_ANY
 from .history import reset_status
 from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 
 from six import iteritems, iterkeys, string_types, text_type
 from sg_helpers import long_path, scantree

@@ -571,7 +571,7 @@ class ProcessTVShow(object):
             archives = [os.path.basename(x) for x in unused_files]
             if unused_files:
                 for f in unused_files:
-                    archive_history.setdefault(f, int(timestamp_near(datetime.datetime.utcnow())))
+                    archive_history.setdefault(f, SGDatetime.timestamp_near(datetime.datetime.utcnow()))
 
             if init_history_cnt != len(archive_history):
                 try:

@@ -30,7 +30,7 @@ from .common import ARCHIVED, FAILED, DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER,
     NeededQualities, Quality
 from .history import dateFormat
 from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 
 from _23 import map_consume
 from six import string_types

@@ -689,10 +689,10 @@ def _set_last_proper_search(when):
 
     if 0 == len(sql_result):
         my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
-                     [0, 0, int(timestamp_near(when))])
+                     [0, 0, SGDatetime.timestamp_near(when)])
     else:
         # noinspection SqlConstantCondition
-        my_db.action('UPDATE info SET last_proper_search=%s WHERE 1=1' % int(timestamp_near(when)))
+        my_db.action('UPDATE info SET last_proper_search=%s WHERE 1=1' % SGDatetime.timestamp_near(when))
 
 
 def next_proper_timeleft():

@@ -39,7 +39,7 @@ from ..helpers import maybe_plural, remove_file_perm
 from ..name_parser.parser import InvalidNameException, InvalidShowException, NameParser
 from ..scene_exceptions import has_season_exceptions
 from ..show_name_helpers import get_show_names_all_possible
-from ..sgdatetime import SGDatetime, timestamp_near
+from ..sgdatetime import SGDatetime
 from ..tv import TVEpisode, TVShow
 
 from cfscrape import CloudflareScraper

@@ -112,7 +112,7 @@ class ProviderFailList(object):
                 date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour))
                 if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}):
                     if isinstance(e.fail_time, datetime.datetime):
-                        value = timestamp_near(e.fail_time)
+                        value = SGDatetime.timestamp_near(e.fail_time, return_int=False)
                     else:
                         value = SGDatetime.timestamp_far(e.fail_time)
                     default = {'date': str(fail_date), 'date_time': date_time,

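Of note above: this is the one call site converted with return_int=False. The old timestamp_near was a bare alias for datetime.datetime.timestamp (see the sgdatetime.py hunk further below), and this caller used its float result unwrapped, so the refactor keeps the fractional seconds. A minimal illustration of the distinction:

import datetime

dt = datetime.datetime(2023, 3, 6, 23, 40, 0, 500000)
print(datetime.datetime.timestamp(dt))       # float, keeps the .5s (old behaviour, and return_int=False)
print(int(datetime.datetime.timestamp(dt)))  # truncated int (the new default, return_int=True)
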
@@ -178,7 +178,7 @@ class ProviderFailList(object):
             cl = []
             for f in self._fails:
                 if isinstance(f.fail_time, datetime.datetime):
-                    value = int(timestamp_near(f.fail_time))
+                    value = SGDatetime.timestamp_near(f.fail_time)
                 else:
                     value = SGDatetime.timestamp_far(f.fail_time)
                 cl.append(['INSERT OR IGNORE INTO provider_fails (prov_name, fail_type, fail_code, fail_time) '

@@ -211,7 +211,7 @@ class ProviderFailList(object):
             my_db = db.DBConnection('cache.db')
             if my_db.has_table('provider_fails'):
                 # noinspection PyCallByClass,PyTypeChecker
-                time_limit = int(timestamp_near(datetime.datetime.now() - datetime.timedelta(days=28)))
+                time_limit = SGDatetime.timestamp_near(td=datetime.timedelta(days=28))
                 my_db.action('DELETE FROM provider_fails WHERE fail_time < ?', [time_limit])
         except (BaseException, Exception):
             pass

@@ -340,7 +340,7 @@ class GenericProvider(object):
             self._failure_time = value
             if changed_val:
                 if isinstance(value, datetime.datetime):
-                    value = int(timestamp_near(value))
+                    value = SGDatetime.timestamp_near(value)
                 elif value:
                     # noinspection PyCallByClass
                     value = SGDatetime.timestamp_far(value)

@@ -370,7 +370,7 @@ class GenericProvider(object):
             self._tmr_limit_time = value
             if changed_val:
                 if isinstance(value, datetime.datetime):
-                    value = int(timestamp_near(value))
+                    value = SGDatetime.timestamp_near(value)
                 elif value:
                     # noinspection PyCallByClass
                     value = SGDatetime.timestamp_far(value)

@@ -31,7 +31,7 @@ from ..common import NeededQualities, Quality
 from ..helpers import remove_non_release_groups
 from ..indexers.indexer_config import *
 from ..network_timezones import SG_TIMEZONE
-from ..sgdatetime import SGDatetime, timestamp_near
+from ..sgdatetime import SGDatetime
 from ..search import get_aired_in_season, get_wanted_qualities
 from ..show_name_helpers import get_show_names
 from ..scene_exceptions import has_season_exceptions

@@ -217,7 +217,7 @@ class NewznabProvider(generic.NZBProvider):
         try:
             my_db = db.DBConnection('cache.db')
             if isinstance(value, datetime.datetime):
-                save_value = int(timestamp_near(value))
+                save_value = SGDatetime.timestamp_near(value)
             else:
                 save_value = SGDatetime.timestamp_far(value, default=0)
             my_db.action('INSERT OR REPLACE INTO "lastrecentsearch" (name, datetime) VALUES (?,?)',

@@ -31,7 +31,7 @@ from . import db, helpers, logger, name_cache
 from .anime import create_anidb_obj
 from .classes import OrderedDefaultdict
 from .indexers.indexer_config import TVINFO_TVDB
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 
 import lib.rarfile.rarfile as rarfile
 

@@ -68,9 +68,9 @@ def should_refresh(name, max_refresh_age_secs=86400, remaining=False):
     if rows:
         last_refresh = int(rows[0]['last_refreshed'])
         if remaining:
-            time_left = (last_refresh + max_refresh_age_secs - int(timestamp_near(datetime.datetime.now())))
+            time_left = (last_refresh + max_refresh_age_secs - SGDatetime.timestamp_near())
             return (0, time_left)[time_left > 0]
-        return int(timestamp_near(datetime.datetime.now())) > last_refresh + max_refresh_age_secs
+        return SGDatetime.timestamp_near() > last_refresh + max_refresh_age_secs
     return True
 
 

@@ -82,7 +82,7 @@ def set_last_refresh(name):
     """
     my_db = db.DBConnection()
     my_db.upsert('scene_exceptions_refresh',
-                 {'last_refreshed': int(timestamp_near(datetime.datetime.now()))},
+                 {'last_refreshed': SGDatetime.timestamp_near()},
                  {'list': name})
 
 

@@ -30,7 +30,7 @@ import sickgear
 from . import db, logger
 from .helpers import try_int
 from .scene_exceptions import xem_ids_list
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 
 # noinspection PyUnreachableCode
 if False:

@@ -794,7 +794,7 @@ def xem_refresh(tvid, prodid, force=False):
         """, [tvid, prodid])
     if sql_result:
         last_refresh = int(sql_result[0]['last_refreshed'])
-        refresh = int(timestamp_near(datetime.datetime.now())) > last_refresh + max_refresh_age_secs
+        refresh = SGDatetime.timestamp_near() > last_refresh + max_refresh_age_secs
     else:
         refresh = True
 

@@ -803,7 +803,7 @@ def xem_refresh(tvid, prodid, force=False):
 
         # mark refreshed
         my_db.upsert('xem_refresh',
-                     dict(last_refreshed=int(timestamp_near(datetime.datetime.now()))),
+                     dict(last_refreshed=SGDatetime.timestamp_near()),
                      dict(indexer=tvid, indexer_id=prodid))
 
         try:

@@ -25,7 +25,7 @@ from . import db, logger, scheduler, search_queue, ui
 from .helpers import find_show_by_id
 from .providers.generic import GenericProvider
 from .search import wanted_episodes
-from .sgdatetime import SGDatetime, timestamp_near
+from .sgdatetime import SGDatetime
 from .tv import TVidProdid, TVEpisode, TVShow
 
 from six import iteritems, itervalues, moves

@@ -344,7 +344,7 @@ class BacklogSearcher(object):
             last_run_time = 1
         else:
             last_run_time = int(sql_result[0]['last_run_backlog'])
-        if last_run_time > int(timestamp_near(datetime.datetime.now())):
+        if last_run_time > SGDatetime.timestamp_near():
             last_run_time = 1
 
         return last_run_time

@@ -356,7 +356,7 @@ class BacklogSearcher(object):
         sql_result = my_db.select('SELECT * FROM info')
 
         if isinstance(when, datetime.datetime):
-            when = int(timestamp_near(when))
+            when = SGDatetime.timestamp_near(when)
         else:
             when = SGDatetime.timestamp_far(when, default=0)
         if 0 == len(sql_result):

@@ -282,16 +282,25 @@ class SGDatetime(datetime.datetime):
         finally:
             return (default, timestamp)[isinstance(timestamp, (float, integer_types))]
 
-
-# noinspection PyUnreachableCode
-if False:
-    # just to trick pycharm in correct type detection
-    # noinspection PyUnusedLocal
-    def timestamp_near(d_t):
-        # type: (datetime.datetime) -> float
-        pass
-
-
-# py3 native timestamp uses milliseconds
-# noinspection PyRedeclaration
-timestamp_near = datetime.datetime.timestamp
+    @static_or_instance
+    def timestamp_near(self,
+                       dt=None,  # type: Optional[SGDatetime, datetime.datetime]
+                       td=None,  # type: Optional[datetime.timedelta]
+                       return_int=True  # type: bool
+                       ):
+        # type: (...) -> Union[float, integer_types]
+        """
+        Use `timestamp_near` for a timestamp in the near future or near past
+
+        Raises exception if dt cannot be converted to int
+
+        td is timedelta to subtract from datetime
+        """
+        obj = (dt, self)[self is not None]  # type: datetime.datetime
+        if None is obj:
+            obj = datetime.datetime.now()
+        if isinstance(td, datetime.timedelta):
+            obj -= td
+        if not return_int:
+            return datetime.datetime.timestamp(obj)
+        return int(datetime.datetime.timestamp(obj))

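The new method above can be exercised as follows; a minimal sketch, assuming the static_or_instance decorator passes self as None on class-level calls (which the `(dt, self)[self is not None]` line relies on) and assuming the package-absolute import path:

import datetime

from sickgear.sgdatetime import SGDatetime  # assumed absolute path for the in-package .sgdatetime module

# current time as an int timestamp; replaces int(timestamp_near(datetime.datetime.now()))
now_stamp = SGDatetime.timestamp_near()

# "now minus 12 hours" as an int timestamp; replaces the now() - timedelta(...) call sites
del_time = SGDatetime.timestamp_near(td=datetime.timedelta(hours=12))

# an explicit datetime, keeping fractional seconds as a float
value = SGDatetime.timestamp_near(datetime.datetime.utcnow(), return_int=False)
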
@@ -52,7 +52,7 @@ from .helpers import try_float, try_int
 from .indexermapper import del_mapping, MapStatus, save_mapping
 from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TVRAGE
 from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
-from .sgdatetime import SGDatetime, timestamp_near
+from .sgdatetime import SGDatetime
 from .tv_base import TVEpisodeBase, TVShowBase
 
 from lib import imdbpie, subliminal

@@ -1530,7 +1530,7 @@ class TVShow(TVShowBase):
         self._last_found_on_indexer = self.last_found_on_indexer
         my_db = db.DBConnection()
         # noinspection PyUnresolvedReferences
-        last_check = int(timestamp_near(datetime.datetime.now()))
+        last_check = SGDatetime.timestamp_near()
         # in case of flag change (+/-) don't change last_check date
         if abs(v) == abs(self._not_found_count):
             sql_result = my_db.select(

@@ -27,7 +27,7 @@ from .classes import SearchResult
 from .common import Quality
 from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser, ParseResult
 from .rssfeeds import RSSFeeds
-from .sgdatetime import timestamp_near
+from .sgdatetime import SGDatetime
 from .tv import TVEpisode
 
 # noinspection PyUnreachableCode

@@ -172,7 +172,7 @@ class TVCache(object):
 
         if sql_result:
             last_time = int(sql_result[0]['time'])
-            if last_time > int(timestamp_near(datetime.datetime.now())):
+            if last_time > SGDatetime.timestamp_near():
                 last_time = 0
         else:
             last_time = 0

@@ -190,7 +190,7 @@ class TVCache(object):
 
         if sql_result:
             last_time = int(sql_result[0]['time'])
-            if last_time > int(timestamp_near(datetime.datetime.now())):
+            if last_time > SGDatetime.timestamp_near():
                 last_time = 0
         else:
             last_time = 0

@@ -300,7 +300,7 @@ class TVCache(object):
         episode_text = '|%s|' % '|'.join(map(str, episode_numbers))
 
         # get the current timestamp
-        cur_timestamp = int(timestamp_near(datetime.datetime.now()))
+        cur_timestamp = SGDatetime.timestamp_near()
 
         # get quality of release
         quality = parse_result.quality

@@ -66,7 +66,7 @@ from .providers import newznab, rsstorrent
 from .scene_numbering import get_scene_absolute_numbering_for_show, get_scene_numbering_for_show, \
     get_xem_absolute_numbering_for_show, get_xem_numbering_for_show, set_scene_numbering_helper
 from .search_backlog import FORCED_BACKLOG
-from .sgdatetime import SGDatetime, timestamp_near
+from .sgdatetime import SGDatetime
 from .show_name_helpers import abbr_showname
 
 from .show_updater import clean_ignore_require_words

@@ -674,7 +674,7 @@ class RepoHandler(BaseStaticFileHandler):
 
     def get_watchedstate_updater_addon_xml(self):
         mem_key = 'kodi_xml'
-        if int(timestamp_near(datetime.datetime.now())) < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0):
+        if SGDatetime.timestamp_near() < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0):
             return sickgear.MEMCACHE.get(mem_key).get('data')
 
         filename = 'addon%s.xml' % self.kodi_include

@@ -682,7 +682,7 @@ class RepoHandler(BaseStaticFileHandler):
                        'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh:
             xml = fh.read().strip() % dict(ADDON_VERSION=self.get_addon_version(self.kodi_include))
 
-        sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=xml)
+        sickgear.MEMCACHE[mem_key] = dict(last_update=30 + SGDatetime.timestamp_near(), data=xml)
         return xml
 
     @staticmethod

@@ -696,7 +696,7 @@ class RepoHandler(BaseStaticFileHandler):
         Must use an arg here instead of `self` due to static call use case from external class
         """
         mem_key = 'kodi_ver'
-        if int(timestamp_near(datetime.datetime.now())) < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0):
+        if SGDatetime.timestamp_near() < sickgear.MEMCACHE.get(mem_key, {}).get('last_update', 0):
             return sickgear.MEMCACHE.get(mem_key).get('data')
 
         filename = 'service%s.py' % kodi_include

@@ -704,7 +704,7 @@ class RepoHandler(BaseStaticFileHandler):
                        'service.sickgear.watchedstate.updater', filename), 'r', encoding='utf8') as fh:
             version = re.findall(r'ADDON_VERSION\s*?=\s*?\'([^\']+)', fh.read())[0]
 
-        sickgear.MEMCACHE[mem_key] = dict(last_update=30 + int(timestamp_near(datetime.datetime.now())), data=version)
+        sickgear.MEMCACHE[mem_key] = dict(last_update=30 + SGDatetime.timestamp_near(), data=version)
         return version
 
     def render_kodi_repo_addon_xml(self):

@@ -1465,7 +1465,7 @@ r.close()
                     continue
 
                 if bname in ep_results:
-                    date_watched = now = int(timestamp_near(datetime.datetime.now()))
+                    date_watched = now = SGDatetime.timestamp_near()
                     if 1500000000 < date_watched:
                         date_watched = helpers.try_int(float(v.get('date_watched')))
 

@@ -9589,8 +9589,8 @@ class CachedImages(MainHandler):
         dummy_file = '%s.%s.dummy' % (os.path.splitext(filename)[0], source)
         if os.path.isfile(dummy_file):
             if os.stat(dummy_file).st_mtime \
-                    < (int(timestamp_near((datetime.datetime.now()
-                                           - datetime.timedelta(days=days, minutes=minutes))))):
+                    < (SGDatetime.timestamp_near(datetime.datetime.now()
+                                                 - datetime.timedelta(days=days, minutes=minutes))):
                 CachedImages.delete_dummy_image(dummy_file)
         else:
             result = False

@@ -9695,7 +9695,7 @@ class CachedImages(MainHandler):
         """
        if not os.path.isfile(filename) or \
                 os.stat(filename).st_mtime < \
-                (int(timestamp_near((datetime.datetime.now() - datetime.timedelta(days=days))))):
+                SGDatetime.timestamp_near(td=datetime.timedelta(days=days)):
             return True
         return False