Merge branch 'main' into dev

This commit is contained in:
JackDandy 2024-10-07 00:35:28 +01:00
commit eef5ddb171
15 changed files with 76 additions and 33 deletions

View file

@@ -5,6 +5,19 @@
* Update urllib3 2.2.1 (54d6edf) to 2.2.2 (27e2a5c) * Update urllib3 2.2.1 (54d6edf) to 2.2.2 (27e2a5c)
### 3.32.8 (2024-10-07 00:30:00 UTC)
* Change min required Python version to 3.9
* Change add support for Python 3.9.20, 3.10.15, 3.11.10, 3.12.7
* Change update fallback zoneinfo to 2024b
* Change improve config.ini save failure messages
* Change add '-f' to copy_file command for 'posix' systems
* Fix Fanart.tv and CF data fetch
* Fix TVC images
* Change add '.avif' extensions as valid image type (used by tvc cards)
* Change hide lazy load animation when loading fails (unsupported image format)
### 3.32.7 (2024-08-13 11:30:00 UTC) ### 3.32.7 (2024-08-13 11:30:00 UTC)
* Change to prevent saving config.ini before it's fully loaded * Change to prevent saving config.ini before it's fully loaded

View file

@@ -1485,6 +1485,8 @@ home_browseShows.tmpl
border-top-left-radius:5px; border-top-left-radius:5px;
border-top-right-radius:5px; border-top-right-radius:5px;
border-bottom:1px solid #111; border-bottom:1px solid #111;
background-position:50% 50%;
text-decoration: none !important;
background-image:url("../images/poster-dark.jpg") background-image:url("../images/poster-dark.jpg")
} }

View file

@@ -1,5 +1,4 @@
function initLazyload(){ function hide_lazy_ani (element){
$.ll = new LazyLoad({elements_selector:'img[data-original]', callback_load:function(element){
if (element.id) { if (element.id) {
var el = document.getElementById('loading-' + element.id), className = 'hide'; var el = document.getElementById('loading-' + element.id), className = 'hide';
if (!!document.body.classList) { if (!!document.body.classList) {
@@ -8,7 +7,11 @@ function initLazyload(){
el.className += (el.className ? ' ' : '') + className; el.className += (el.className ? ' ' : '') + className;
} }
} }
}}); }
function initLazyload(){
$.ll = new LazyLoad({elements_selector:'img[data-original]', callback_load: hide_lazy_ani,
callback_error: hide_lazy_ani});
$.ll.handleScroll(); $.ll.handleScroll();
return !0; return !0;
} }

View file

@@ -56,8 +56,8 @@ class CloudflareScraper(Session):
and resp.status_code in (503, 429, 403)): and resp.status_code in (503, 429, 403)):
self.start_time = time.time() self.start_time = time.time()
if (re.search('(?i)cloudflare', resp.headers.get('Server', '')) if (re.search('(?i)cloudflare', resp.headers.get('Server', ''))
and b'jschl_vc' in resp.content and b'_cf_chl_' in resp.content
and b'jschl_answer' in resp.content): or (b'jschl_vc' in resp.content and b'jschl_answer' in resp.content)):
resp = self.solve_cf_challenge(resp, url_solver, **kwargs) resp = self.solve_cf_challenge(resp, url_solver, **kwargs)
elif b'ddgu' in resp.content: elif b'ddgu' in resp.content:
resp = self.solve_ddg_challenge(resp, **kwargs) resp = self.solve_ddg_challenge(resp, **kwargs)

View file

@@ -7,11 +7,12 @@ from sg_helpers import get_url
class Request(object): class Request(object):
def __init__(self, apikey, tvdb_id, ws=fanart.WS.TV, types=None): def __init__(self, apikey, tvdb_id, ws=fanart.WS.TV, types=None, **kwargs):
self._apikey = apikey self._apikey = apikey
self._tvdb_id = tvdb_id self._tvdb_id = tvdb_id
self._ws = ws self._ws = ws
self._types = types self._types = types
self._kwargs = kwargs
self._response = None self._response = None
self._web_url = 'https://fanart.tv/series/%s' self._web_url = 'https://fanart.tv/series/%s'
self._assets_url = 'https://assets.fanart.tv' self._assets_url = 'https://assets.fanart.tv'
@@ -22,7 +23,7 @@ class Request(object):
def response(self): def response(self):
try: try:
rjson = get_url(str(self), parse_json=True) rjson = get_url(str(self), parse_json=True, **self._kwargs)
image_type = self._types or u'showbackground' image_type = self._types or u'showbackground'
rhtml = self.scrape_web(image_type) rhtml = self.scrape_web(image_type)
if not isinstance(rjson, dict) and 0 == len(rhtml[image_type]): if not isinstance(rjson, dict) and 0 == len(rhtml[image_type]):
@@ -31,7 +32,7 @@ class Request(object):
if not isinstance(rjson, dict): if not isinstance(rjson, dict):
rjson = {image_type: []} rjson = {image_type: []}
if 0 != len(rhtml[image_type]): if None is not rhtml and 0 != len(rhtml[image_type]):
rjson_ids = map(lambda i: i['id'], rjson[image_type]) rjson_ids = map(lambda i: i['id'], rjson[image_type])
for item in filter(lambda i: i['id'] not in rjson_ids, rhtml[image_type]): for item in filter(lambda i: i['id'] not in rjson_ids, rhtml[image_type]):
rjson[image_type] += [item] rjson[image_type] += [item]
@@ -48,7 +49,7 @@ class Request(object):
def scrape_web(self, image_type): def scrape_web(self, image_type):
try: try:
data = get_url(self._web_url % self._tvdb_id) data = get_url(self._web_url % self._tvdb_id, **self._kwargs)
if not data: if not data:
return return

View file

@@ -1148,7 +1148,7 @@ def scantree(path, # type: AnyStr
def copy_file(src_file, dest_file): def copy_file(src_file, dest_file):
if os.name.startswith('posix'): if os.name.startswith('posix'):
subprocess.call(['cp', src_file, dest_file]) subprocess.call(['cp', '-f', src_file, dest_file])
else: else:
shutil.copyfile(src_file, dest_file) shutil.copyfile(src_file, dest_file)

View file

@@ -36,9 +36,8 @@ warnings.filterwarnings('ignore', module=r'.*ssl_.*', message='.*SSLContext obje
warnings.filterwarnings('ignore', module=r'.*zoneinfo.*', message='.*file or directory.*') warnings.filterwarnings('ignore', module=r'.*zoneinfo.*', message='.*file or directory.*')
warnings.filterwarnings('ignore', message='.*deprecated in cryptography.*') warnings.filterwarnings('ignore', message='.*deprecated in cryptography.*')
versions = [((3, 8, 2), (3, 8, 19)), versions = [((3, 9, 0), (3, 9, 2)), ((3, 9, 4), (3, 9, 20)),
((3, 9, 0), (3, 9, 2)), ((3, 9, 4), (3, 9, 19)), ((3, 10, 0), (3, 12, 7))] # inclusive version ranges
((3, 10, 0), (3, 12, 5))] # inclusive version ranges
if not any(list(map(lambda v: v[0] <= sys.version_info[:3] <= v[1], versions))) and not int(os.environ.get('PYT', 0)): if not any(list(map(lambda v: v[0] <= sys.version_info[:3] <= v[1], versions))) and not int(os.environ.get('PYT', 0)):
major, minor, micro = sys.version_info[:3] major, minor, micro = sys.version_info[:3]
print('Python %s.%s.%s detected.' % (major, minor, micro)) print('Python %s.%s.%s detected.' % (major, minor, micro))

View file

@@ -2475,22 +2475,36 @@ def _save_config(force=False, **kwargs):
backup_config = re.sub(r'\.ini$', '.bak', CONFIG_FILE) backup_config = re.sub(r'\.ini$', '.bak', CONFIG_FILE)
from .config import check_valid_config from .config import check_valid_config
try: try:
if check_valid_config(CONFIG_FILE):
for _t in range(0, 3):
copy_file(CONFIG_FILE, backup_config) copy_file(CONFIG_FILE, backup_config)
if not check_valid_config(backup_config): if not check_valid_config(backup_config):
logger.error('config file seams to be invalid, not backing up.') if 2 > _t:
logger.debug('backup config file seems to be invalid, retrying...')
else:
logger.warning('backup config file seems to be invalid, not backing up.')
backup_config = None
remove_file_perm(backup_config) remove_file_perm(backup_config)
2 > _t and time.sleep(3)
else:
break
else:
logger.warning('existing config file is invalid, not backing it up')
backup_config = None backup_config = None
except (BaseException, Exception): except (BaseException, Exception):
backup_config = None backup_config = None
for _ in range(0, 3): for _t in range(0, 3):
new_config.write() new_config.write()
if check_valid_config(CONFIG_FILE): if check_valid_config(CONFIG_FILE):
CONFIG_OLD = copy.deepcopy(new_config) CONFIG_OLD = copy.deepcopy(new_config)
return return
logger.warning('saving config file failed, retrying...') if 2 > _t:
logger.debug('saving config file failed, retrying...')
else:
logger.warning('saving config file failed.')
remove_file_perm(CONFIG_FILE) remove_file_perm(CONFIG_FILE)
time.sleep(3) 2 > _t and time.sleep(3)
# we only get here if the config saving failed multiple times # we only get here if the config saving failed multiple times
if None is not backup_config and os.path.isfile(backup_config): if None is not backup_config and os.path.isfile(backup_config):

View file

@@ -484,7 +484,7 @@ def backup_config():
logger.log('backing up config.ini') logger.log('backing up config.ini')
try: try:
if not check_valid_config(sickgear.CONFIG_FILE): if not check_valid_config(sickgear.CONFIG_FILE):
logger.error('config file seams to be invalid, not backing up.') logger.error('config file seems to be invalid, not backing up.')
return return
now = datetime.datetime.now() now = datetime.datetime.now()
d = datetime.datetime.strftime(now, '%Y-%m-%d') d = datetime.datetime.strftime(now, '%Y-%m-%d')
@@ -493,7 +493,7 @@ def backup_config():
target = os.path.join(target_base, 'config.ini') target = os.path.join(target_base, 'config.ini')
copy_file(sickgear.CONFIG_FILE, target) copy_file(sickgear.CONFIG_FILE, target)
if not check_valid_config(target): if not check_valid_config(target):
logger.error('config file seams to be invalid, not backing up.') logger.error('config file seems to be invalid, not backing up.')
remove_file_perm(target) remove_file_perm(target)
return return
compress_file(target, 'config.ini') compress_file(target, 'config.ini')

View file

@@ -171,7 +171,7 @@ def has_image_ext(filename):
:rtype: bool :rtype: bool
""" """
try: try:
if os.path.splitext(filename)[1].lower() in ['.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']: if os.path.splitext(filename)[1].lower() in ['.avif', '.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']:
return True return True
except (BaseException, Exception): except (BaseException, Exception):
pass pass

View file

@@ -1198,7 +1198,8 @@ class GenericMetadata(object):
try: try:
if tvdb_id: if tvdb_id:
request = fanartRequest(apikey=sickgear.FANART_API_KEY, tvdb_id=tvdb_id, types=types[image_type]) request = fanartRequest(apikey=sickgear.FANART_API_KEY, tvdb_id=tvdb_id, types=types[image_type],
url_solver=sickgear.FLARESOLVERR_HOST)
resp = request.response() resp = request.response()
itemlist = [] itemlist = []
dedupe = [] dedupe = []

View file

@@ -1 +1 @@
3.8.2 3.9.0

View file

@@ -5936,8 +5936,18 @@ class AddShows(Home):
url_path = info['href'].strip() url_path = info['href'].strip()
title = info.find('h2').get_text(strip=True) title = info.find('h2').get_text(strip=True)
img_uri = None
# try image locations e.g. https://pogd.es/assets/bg/KAOS.jpg
img_name = re.sub(r'[:\s]+', '-', title)
for cur_type in ('jpg', 'jpeg', 'webp', 'png', 'gif', 'bmp', 'avif'):
uri = f'https://pogd.es/assets/bg/{img_name}.{cur_type}'
if helpers.check_url(uri):
img_uri = uri
break
if None is img_uri:
# use alternative avif image fallback as only supported by new browsers
img_uri = info.get('data-original', '').strip() img_uri = info.get('data-original', '').strip()
if not img_uri: if not img_uri: # old image fallback (pre 2024-08-18)
img_uri = re.findall(r'(?i).*?image:\s*url\(([^)]+)', info.attrs['style'])[0].strip() img_uri = re.findall(r'(?i).*?image:\s*url\(([^)]+)', info.attrs['style'])[0].strip()
images = dict(poster=dict(thumb='imagecache?path=browse/thumb/tvc&source=%s' % img_uri)) images = dict(poster=dict(thumb='imagecache?path=browse/thumb/tvc&source=%s' % img_uri))
sickgear.CACHE_IMAGE_URL_LIST.add_url(img_uri) sickgear.CACHE_IMAGE_URL_LIST.add_url(img_uri)

View file

@@ -206,7 +206,7 @@ def setup_test_db():
def teardown_test_db(): def teardown_test_db():
"""Deletes the test db """Deletes the test db
although this seams not to work on my system it leaves me with an zero kb file although this seems not to work on my system it leaves me with an zero kb file
""" """
# uncomment next line so leave the db intact between test and at the end # uncomment next line so leave the db intact between test and at the end
# return False # return False