Fix Fanart.tv and CF data fetch.

Fix TVC images.
Change center the default card background and hide hover text decoration.
Change correct typos
This commit is contained in:
JackDandy 2024-10-07 00:30:50 +01:00
parent f2f39568dd
commit d2f7bb6104
8 changed files with 41 additions and 14 deletions

View file

@@ -1,4 +1,17 @@
### 3.32.7 (2024-08-13 11:30:00 UTC) ### 3.32.8 (2024-10-07 00:30:00 UTC)
* Change min required Python version to 3.9
* Change add support for Python 3.9.20, 3.10.15, 3.11.10, 3.12.7
* Change update fallback zoneinfo to 2024b
* Change improve config.ini save failure messages
* Change add '-f' to copy_file command for 'posix' systems
* Fix Fanart.tv and CF data fetch
* Fix TVC images
* Change add '.avif' extensions as valid image type (used by tvc cards)
* Change hide lazy load animation when loading fails (unsupported image format)
### 3.32.7 (2024-08-13 11:30:00 UTC)
* Change to prevent saving config.ini before it's fully loaded * Change to prevent saving config.ini before it's fully loaded
* Change login form to be more password manager friendly * Change login form to be more password manager friendly

View file

@@ -1485,6 +1485,8 @@ home_browseShows.tmpl
border-top-left-radius:5px; border-top-left-radius:5px;
border-top-right-radius:5px; border-top-right-radius:5px;
border-bottom:1px solid #111; border-bottom:1px solid #111;
background-position:50% 50%;
text-decoration: none !important;
background-image:url("../images/poster-dark.jpg") background-image:url("../images/poster-dark.jpg")
} }

View file

@@ -56,8 +56,8 @@ class CloudflareScraper(Session):
and resp.status_code in (503, 429, 403)): and resp.status_code in (503, 429, 403)):
self.start_time = time.time() self.start_time = time.time()
if (re.search('(?i)cloudflare', resp.headers.get('Server', '')) if (re.search('(?i)cloudflare', resp.headers.get('Server', ''))
and b'jschl_vc' in resp.content and b'_cf_chl_' in resp.content
and b'jschl_answer' in resp.content): or (b'jschl_vc' in resp.content and b'jschl_answer' in resp.content)):
resp = self.solve_cf_challenge(resp, url_solver, **kwargs) resp = self.solve_cf_challenge(resp, url_solver, **kwargs)
elif b'ddgu' in resp.content: elif b'ddgu' in resp.content:
resp = self.solve_ddg_challenge(resp, **kwargs) resp = self.solve_ddg_challenge(resp, **kwargs)

View file

@@ -7,11 +7,12 @@ from sg_helpers import get_url
class Request(object): class Request(object):
def __init__(self, apikey, tvdb_id, ws=fanart.WS.TV, types=None): def __init__(self, apikey, tvdb_id, ws=fanart.WS.TV, types=None, **kwargs):
self._apikey = apikey self._apikey = apikey
self._tvdb_id = tvdb_id self._tvdb_id = tvdb_id
self._ws = ws self._ws = ws
self._types = types self._types = types
self._kwargs = kwargs
self._response = None self._response = None
self._web_url = 'https://fanart.tv/series/%s' self._web_url = 'https://fanart.tv/series/%s'
self._assets_url = 'https://assets.fanart.tv' self._assets_url = 'https://assets.fanart.tv'
@@ -22,7 +23,7 @@ class Request(object):
def response(self): def response(self):
try: try:
rjson = get_url(str(self), parse_json=True) rjson = get_url(str(self), parse_json=True, **self._kwargs)
image_type = self._types or u'showbackground' image_type = self._types or u'showbackground'
rhtml = self.scrape_web(image_type) rhtml = self.scrape_web(image_type)
if not isinstance(rjson, dict) and 0 == len(rhtml[image_type]): if not isinstance(rjson, dict) and 0 == len(rhtml[image_type]):
@@ -31,7 +32,7 @@ class Request(object):
if not isinstance(rjson, dict): if not isinstance(rjson, dict):
rjson = {image_type: []} rjson = {image_type: []}
if 0 != len(rhtml[image_type]): if None is not rhtml and 0 != len(rhtml[image_type]):
rjson_ids = map(lambda i: i['id'], rjson[image_type]) rjson_ids = map(lambda i: i['id'], rjson[image_type])
for item in filter(lambda i: i['id'] not in rjson_ids, rhtml[image_type]): for item in filter(lambda i: i['id'] not in rjson_ids, rhtml[image_type]):
rjson[image_type] += [item] rjson[image_type] += [item]
@@ -48,7 +49,7 @@ class Request(object):
def scrape_web(self, image_type): def scrape_web(self, image_type):
try: try:
data = get_url(self._web_url % self._tvdb_id) data = get_url(self._web_url % self._tvdb_id, **self._kwargs)
if not data: if not data:
return return

View file

@@ -484,7 +484,7 @@ def backup_config():
logger.log('backing up config.ini') logger.log('backing up config.ini')
try: try:
if not check_valid_config(sickgear.CONFIG_FILE): if not check_valid_config(sickgear.CONFIG_FILE):
logger.error('config file seams to be invalid, not backing up.') logger.error('config file seems to be invalid, not backing up.')
return return
now = datetime.datetime.now() now = datetime.datetime.now()
d = datetime.datetime.strftime(now, '%Y-%m-%d') d = datetime.datetime.strftime(now, '%Y-%m-%d')
@@ -493,7 +493,7 @@ def backup_config():
target = os.path.join(target_base, 'config.ini') target = os.path.join(target_base, 'config.ini')
copy_file(sickgear.CONFIG_FILE, target) copy_file(sickgear.CONFIG_FILE, target)
if not check_valid_config(target): if not check_valid_config(target):
logger.error('config file seams to be invalid, not backing up.') logger.error('config file seems to be invalid, not backing up.')
remove_file_perm(target) remove_file_perm(target)
return return
compress_file(target, 'config.ini') compress_file(target, 'config.ini')

View file

@@ -1198,7 +1198,8 @@ class GenericMetadata(object):
try: try:
if tvdb_id: if tvdb_id:
request = fanartRequest(apikey=sickgear.FANART_API_KEY, tvdb_id=tvdb_id, types=types[image_type]) request = fanartRequest(apikey=sickgear.FANART_API_KEY, tvdb_id=tvdb_id, types=types[image_type],
url_solver=sickgear.FLARESOLVERR_HOST)
resp = request.response() resp = request.response()
itemlist = [] itemlist = []
dedupe = [] dedupe = []

View file

@@ -5936,8 +5936,18 @@ class AddShows(Home):
url_path = info['href'].strip() url_path = info['href'].strip()
title = info.find('h2').get_text(strip=True) title = info.find('h2').get_text(strip=True)
img_uri = None
# try image locations e.g. https://pogd.es/assets/bg/KAOS.jpg
img_name = re.sub(r'[:\s]+', '-', title)
for cur_type in ('jpg', 'jpeg', 'webp', 'png', 'gif', 'bmp', 'avif'):
uri = f'https://pogd.es/assets/bg/{img_name}.{cur_type}'
if helpers.check_url(uri):
img_uri = uri
break
if None is img_uri:
# use alternative avif image fallback as only supported by new browsers
img_uri = info.get('data-original', '').strip() img_uri = info.get('data-original', '').strip()
if not img_uri: if not img_uri: # old image fallback (pre 2024-08-18)
img_uri = re.findall(r'(?i).*?image:\s*url\(([^)]+)', info.attrs['style'])[0].strip() img_uri = re.findall(r'(?i).*?image:\s*url\(([^)]+)', info.attrs['style'])[0].strip()
images = dict(poster=dict(thumb='imagecache?path=browse/thumb/tvc&source=%s' % img_uri)) images = dict(poster=dict(thumb='imagecache?path=browse/thumb/tvc&source=%s' % img_uri))
sickgear.CACHE_IMAGE_URL_LIST.add_url(img_uri) sickgear.CACHE_IMAGE_URL_LIST.add_url(img_uri)

View file

@@ -206,7 +206,7 @@ def setup_test_db():
def teardown_test_db(): def teardown_test_db():
"""Deletes the test db """Deletes the test db
although this seams not to work on my system it leaves me with an zero kb file although this seems not to work on my system it leaves me with an zero kb file
""" """
# uncomment next line so leave the db intact between test and at the end # uncomment next line so leave the db intact between test and at the end
# return False # return False