Mirror of https://github.com/SickGear/SickGear.git (synced 2024-11-24 13:55:16 +00:00)
use context manager for scandir as recommended since python 3.6
parent 6abec1d45d
commit aeb3f7b0dc
5 changed files with 152 additions and 130 deletions
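The whole change applies one pattern: os.scandir() returns an iterator that, since Python 3.6, also works as a context manager, so wrapping it in a with block closes the underlying directory handle as soon as the block exits instead of waiting for garbage collection. A minimal standalone sketch of the before/after shape (illustrative only, not code from this commit; list_file_names and its argument are made-up names):

    import os

    def list_file_names(path):
        # hypothetical helper, not part of SickGear: collect plain file names one level deep
        names = []
        # before: "for entry in os.scandir(path):" leaves closing the iterator to the garbage collector
        # after: the context manager releases the directory handle deterministically
        with os.scandir(path) as it:
            for entry in it:
                if entry.is_file(follow_symlinks=False):
                    names.append(entry.name)
        return names
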
@@ -1120,29 +1120,30 @@ def scantree(path,  # type: AnyStr
                                       (r'(?i)%s', include))]
         has_exclude, has_exclude_dirs, has_include = bool(exclude), bool(exclude_dirs), bool(include)

-        for entry in scandir(path):
-            is_dir = entry.is_dir(follow_symlinks=follow_symlinks)
-            is_file = entry.is_file(follow_symlinks=follow_symlinks)
-            no_filter = any([None is filter_kind, filter_kind and is_dir, not filter_kind and is_file])
-            if ((not has_exclude or rc_exc.search(entry.name))
-                    and (not has_exclude_dirs or not is_dir or rc_exc_dir.search(entry.name))
-                    and (not has_include or rc_inc.search(entry.name))
-                    and (no_filter or (not filter_kind and is_dir and recurse))):
-                if is_dir and exclude_folders_with_files and any(os.path.isfile(os.path.join(entry.path, e_f))
-                                                                 for e_f in exclude_folders_with_files):
-                    logger.debug(f'Ignoring Folder: "{entry.path}", because it contains a exclude file'
-                                 f' "{", ".join(exclude_folders_with_files)}"')
-                    continue
-                if recurse and is_dir:
-                    for subentry in scantree(
-                            path=entry.path, exclude=exclude, exclude_dirs=exclude_dirs, include=include,
-                            follow_symlinks=follow_symlinks, filter_kind=filter_kind, recurse=recurse,
-                            exclude_folders_with_files=exclude_folders_with_files, internal_call=True,
-                            rc_exc=rc_exc, rc_exc_dir=rc_exc_dir, rc_inc=rc_inc, has_exclude=has_exclude,
-                            has_exclude_dirs=has_exclude_dirs, has_include=has_include):
-                        yield subentry
-                if no_filter:
-                    yield entry
+        with scandir(path) as s_d:
+            for entry in s_d:
+                is_dir = entry.is_dir(follow_symlinks=follow_symlinks)
+                is_file = entry.is_file(follow_symlinks=follow_symlinks)
+                no_filter = any([None is filter_kind, filter_kind and is_dir, not filter_kind and is_file])
+                if ((not has_exclude or rc_exc.search(entry.name))
+                        and (not has_exclude_dirs or not is_dir or rc_exc_dir.search(entry.name))
+                        and (not has_include or rc_inc.search(entry.name))
+                        and (no_filter or (not filter_kind and is_dir and recurse))):
+                    if is_dir and exclude_folders_with_files and any(os.path.isfile(os.path.join(entry.path, e_f))
+                                                                     for e_f in exclude_folders_with_files):
+                        logger.debug(f'Ignoring Folder: "{entry.path}", because it contains a exclude file'
+                                     f' "{", ".join(exclude_folders_with_files)}"')
+                        continue
+                    if recurse and is_dir:
+                        for subentry in scantree(
+                                path=entry.path, exclude=exclude, exclude_dirs=exclude_dirs, include=include,
+                                follow_symlinks=follow_symlinks, filter_kind=filter_kind, recurse=recurse,
+                                exclude_folders_with_files=exclude_folders_with_files, internal_call=True,
+                                rc_exc=rc_exc, rc_exc_dir=rc_exc_dir, rc_inc=rc_inc, has_exclude=has_exclude,
+                                has_exclude_dirs=has_exclude_dirs, has_include=has_include):
+                            yield subentry
+                    if no_filter:
+                        yield entry


 def copy_file(src_file, dest_file):

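For orientation, the recursion in scantree reduces to the following shape; this is an illustrative sketch only (iter_tree is a made-up name, and the include/exclude regex filtering and filter_kind handling shown above are omitted):

    import os

    def iter_tree(path, follow_symlinks=False):
        # hypothetical simplified walker: yield every DirEntry below path, depth-first
        with os.scandir(path) as s_d:
            for entry in s_d:
                if entry.is_dir(follow_symlinks=follow_symlinks):
                    # descend first, then yield the directory entry itself
                    yield from iter_tree(entry.path, follow_symlinks=follow_symlinks)
                yield entry
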
@@ -1721,9 +1721,10 @@ def init_stage_2():
     MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit'])

     try:
-        for f in scandir(os.path.join(PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')):
-            if f.is_file():
-                MEMCACHE_FLAG_IMAGES[os.path.splitext(f.name)[0].lower()] = True
+        with scandir(os.path.join(PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')) as s_d:
+            for f in s_d:
+                if f.is_file():
+                    MEMCACHE_FLAG_IMAGES[os.path.splitext(f.name)[0].lower()] = True
     except (BaseException, Exception):
         pass

@@ -746,13 +746,13 @@ def cleanup_old_db_backups(filename):
         d, filename = os.path.split(filename)
         if not d:
             d = sickgear.DATA_DIR
-        for f in filter(lambda fn: fn.is_file() and filename in fn.name and
-                        re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name),
-                        scandir(d)):
-            try:
-                os.unlink(f.path)
-            except (BaseException, Exception):
-                pass
+        with scandir(d) as s_d:
+            for f in filter(lambda fn: fn.is_file() and filename in fn.name and
+                            re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name), s_d):
+                try:
+                    os.unlink(f.path)
+                except (BaseException, Exception):
+                    pass
     except (BaseException, Exception):
         pass

@@ -1567,15 +1567,19 @@ def count_files_dirs(base_dir):
     """
     f = d = 0
     try:
-        files = scandir(base_dir)
+        with scandir(base_dir) as s_d:
+            try:
+                files = s_d
+            except OSError as e:
+                logger.warning('Unable to count files %s / %s' % (repr(e), ex(e)))
+            else:
+                for e in files:
+                    if e.is_file():
+                        f += 1
+                    elif e.is_dir():
+                        d += 1
     except OSError as e:
         logger.warning('Unable to count files %s / %s' % (repr(e), ex(e)))
-    else:
-        for e in files:
-            if e.is_file():
-                f += 1
-            elif e.is_dir():
-                d += 1

     return f, d

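The same counting loop, reduced to a standalone sketch (assumes plain os.scandir and print in place of the project's scandir import and its logger/ex helpers):

    import os

    def count_files_dirs(base_dir):
        # count files and directories one level below base_dir
        f = d = 0
        try:
            with os.scandir(base_dir) as s_d:
                for e in s_d:
                    if e.is_file():
                        f += 1
                    elif e.is_dir():
                        d += 1
        except OSError as e:
            print('Unable to count files %r' % e)
        return f, d
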
@@ -1620,93 +1624,108 @@ def upgrade_new_naming():
             cf = 0
             p_text = 'Upgrading %s' % (d, 'banner/poster')[not d]
             _set_progress(p_text, 0, 0)
-            for entry in scandir(bd):
-                if entry.is_file():
-                    cf += 1
-                    _set_progress(p_text, cf, step)
-                    b_s = bp_match.search(entry.name)
-                    if b_s:
-                        old_id = int(b_s.group(1))
-                        tvid = show_list.get(old_id)
-                        if tvid:
-                            nb_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', '%s-%s' % (tvid, old_id), d)
-                            if not os.path.isdir(nb_dir):
-                                try:
-                                    os.makedirs(nb_dir)
-                                except (BaseException, Exception):
-                                    pass
-                            new_name = os.path.join(nb_dir, bp_match.sub(r'\2', entry.name))
-                            try:
-                                move_file(entry.path, new_name)
-                            except (BaseException, Exception) as e:
-                                logger.warning('Unable to rename %s to %s: %s / %s'
-                                               % (entry.path, new_name, repr(e), ex(e)))
-                        else:
-                            # clean up files without reference in db
-                            try:
-                                os.remove(entry.path)
-                            except (BaseException, Exception):
-                                pass
-                elif entry.is_dir():
-                    if entry.name in ['shows', 'browse']:
-                        continue
-                    elif 'fanart' == entry.name:
-                        _set_progress(p_text, 0, 1)
-                        fc_fan, dc_fan = count_files_dirs(entry.path)
-                        step_fan = dc_fan / float(100)
-                        cf_fan = 0
-                        p_text = 'Upgrading fanart'
-                        _set_progress(p_text, 0, 0)
-                        try:
-                            entries = scandir(entry.path)
-                        except OSError as e:
-                            logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e)))
-                            continue
-                        for d_entry in entries:
-                            if d_entry.is_dir():
-                                cf_fan += 1
-                                _set_progress(p_text, cf_fan, step_fan)
-                                old_id = try_int(d_entry.name)
-                                if old_id:
-                                    new_id = show_list.get(old_id)
-                                    if new_id:
-                                        new_dir_name = os.path.join(sickgear.CACHE_DIR, 'images', 'shows',
-                                                                    '%s-%s' % (new_id, old_id), 'fanart')
-                                        try:
-                                            move_file(d_entry.path, new_dir_name)
-                                        except (BaseException, Exception) as e:
-                                            logger.warning(f'Unable to rename {d_entry.path} to {new_dir_name}:'
-                                                           f' {repr(e)} / {ex(e)}')
-                                        if os.path.isdir(new_dir_name):
-                                            try:
-                                                f_n = filter(lambda fn: fn.is_file(), scandir(new_dir_name))
-                                            except OSError as e:
-                                                logger.warning('Unable to rename %s / %s' % (repr(e), ex(e)))
-                                            else:
-                                                rename_args = []
-                                                # noinspection PyTypeChecker
-                                                for f_entry in f_n:
-                                                    rename_args += [(f_entry.path, bp_match.sub(r'\2', f_entry.path))]
-
-                                                for args in rename_args:
-                                                    try:
-                                                        move_file(*args)
-                                                    except (BaseException, Exception) as e:
-                                                        logger.warning(f'Unable to rename {args[0]} to {args[1]}:'
-                                                                       f' {repr(e)} / {ex(e)}')
-                                    else:
-                                        try:
-                                            shutil.rmtree(d_entry.path)
-                                        except (BaseException, Exception):
-                                            pass
-                                try:
-                                    shutil.rmtree(d_entry.path)
-                                except (BaseException, Exception):
-                                    pass
-                        try:
-                            os.rmdir(entry.path)
-                        except (BaseException, Exception):
-                            pass
+            with scandir(bd) as s_d:
+                for entry in scandir(bd):
+                    if entry.is_file():
+                        cf += 1
+                        _set_progress(p_text, cf, step)
+                        b_s = bp_match.search(entry.name)
+                        if b_s:
+                            old_id = int(b_s.group(1))
+                            tvid = show_list.get(old_id)
+                            if tvid:
+                                nb_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', '%s-%s' % (tvid, old_id), d)
+                                if not os.path.isdir(nb_dir):
+                                    try:
+                                        os.makedirs(nb_dir)
+                                    except (BaseException, Exception):
+                                        pass
+                                new_name = os.path.join(nb_dir, bp_match.sub(r'\2', entry.name))
+                                try:
+                                    move_file(entry.path, new_name)
+                                except (BaseException, Exception) as e:
+                                    logger.warning('Unable to rename %s to %s: %s / %s'
+                                                   % (entry.path, new_name, repr(e), ex(e)))
+                            else:
+                                # clean up files without reference in db
+                                try:
+                                    os.remove(entry.path)
+                                except (BaseException, Exception):
+                                    pass
+                    elif entry.is_dir():
+                        if entry.name in ['shows', 'browse']:
+                            continue
+                        elif 'fanart' == entry.name:
+                            _set_progress(p_text, 0, 1)
+                            fc_fan, dc_fan = count_files_dirs(entry.path)
+                            step_fan = dc_fan / float(100)
+                            cf_fan = 0
+                            p_text = 'Upgrading fanart'
+                            _set_progress(p_text, 0, 0)
+                            try:
+                                with scandir(entry.path) as s_p:
+                                    try:
+                                        entries = s_p
+                                    except OSError as e:
+                                        logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e)))
+                                        continue
+                                    for d_entry in entries:
+                                        if d_entry.is_dir():
+                                            cf_fan += 1
+                                            _set_progress(p_text, cf_fan, step_fan)
+                                            old_id = try_int(d_entry.name)
+                                            if old_id:
+                                                new_id = show_list.get(old_id)
+                                                if new_id:
+                                                    new_dir_name = os.path.join(sickgear.CACHE_DIR, 'images', 'shows',
+                                                                                '%s-%s' % (new_id, old_id), 'fanart')
+                                                    try:
+                                                        move_file(d_entry.path, new_dir_name)
+                                                    except (BaseException, Exception) as e:
+                                                        logger.warning(f'Unable to rename {d_entry.path}'
+                                                                       f' to {new_dir_name}: {repr(e)} / {ex(e)}')
+                                                    if os.path.isdir(new_dir_name):
+                                                        try:
+                                                            with scandir(new_dir_name) as s_d_n:
+                                                                try:
+                                                                    f_n = filter(lambda fn: fn.is_file(), s_d_n)
+                                                                except OSError as e:
+                                                                    logger.warning(
+                                                                        f'Unable to rename {repr(e)} / {ex(d)}')
+                                                                else:
+                                                                    rename_args = []
+                                                                    # noinspection PyTypeChecker
+                                                                    for f_entry in f_n:
+                                                                        rename_args += [
+                                                                            (f_entry.path,
+                                                                             bp_match.sub(r'\2', f_entry.path))]
+
+                                                                    for args in rename_args:
+                                                                        try:
+                                                                            move_file(*args)
+                                                                        except (BaseException, Exception) as e:
+                                                                            logger.warning(
+                                                                                f'Unable to rename {args[0]}'
+                                                                                f' to {args[1]}: {repr(e)} / {ex(e)}')
+                                                        except OSError as e:
+                                                            logger.warning(
+                                                                'Unable to rename %s / %s' % (repr(e), ex(e)))
+                                                else:
+                                                    try:
+                                                        shutil.rmtree(d_entry.path)
+                                                    except (BaseException, Exception):
+                                                        pass
+                                            try:
+                                                shutil.rmtree(d_entry.path)
+                                            except (BaseException, Exception):
+                                                pass
+                            except OSError as e:
+                                logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e)))
+                                continue
+                            try:
+                                os.rmdir(entry.path)
+                            except (BaseException, Exception):
+                                pass
             if 'thumbnails' == d:
                 try:
                     os.rmdir(bd)

@@ -592,15 +592,16 @@ class TVInfoTests(test.SickbeardTestDBCase):
     @classmethod
     def tearDownClass(cls):
         super(TVInfoTests, cls).tearDownClass()
-        files = {_f.name for _f in os.scandir(mock_data_dir) if _f.is_file()}
-        unused_files = files - used_files
-        if delete_unused_mock_files:
-            for _u_f in unused_files:
-                full_filename = os.path.join(mock_data_dir, _u_f)
-                try:
-                    os.remove(full_filename)
-                except (BaseException, Exception) as e:
-                    print('errror deleting: [%s], error: %s' % (full_filename, e))
+        with os.scandir(mock_data_dir) as s_d:
+            files = {_f.name for _f in os.scandir(mock_data_dir) if _f.is_file()}
+            unused_files = files - used_files
+            if delete_unused_mock_files:
+                for _u_f in unused_files:
+                    full_filename = os.path.join(mock_data_dir, _u_f)
+                    try:
+                        os.remove(full_filename)
+                    except (BaseException, Exception) as e:
+                        print('errror deleting: [%s], error: %s' % (full_filename, e))
         if unused_files:
             print('unused files: %s' % unused_files)
         print('reset mock methods')