Commit: Make reading from existing files optional with fallback
Roman513 committed Dec 25, 2024
1 parent e744357 commit 7866399
Showing 4 changed files with 45 additions and 24 deletions.
README.md: 2 additions & 0 deletions
@@ -459,6 +459,8 @@ optional arguments:
--exclude <prefix> Exclude files starting with <prefix> (case
insensitive)
--install-tag <tag> Only download files with the specified install tag
--read-files          Read duplicated parts from already saved files instead
                      of keeping them in RAM
--enable-reordering Enable reordering optimization to reduce RAM
requirements during download (may have adverse results
for some titles)
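In use, this is just one extra flag on a regular install; a hypothetical invocation (the app name is a placeholder):

    legendary install Fortnite --read-files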
legendary/cli.py: 3 additions & 0 deletions
@@ -971,6 +971,7 @@ def install_game(self, args):
file_prefix_filter=args.file_prefix,
file_exclude_filter=args.file_exclude_prefix,
file_install_tag=args.install_tag,
read_files=args.read_files,
dl_optimizations=args.order_opt,
dl_timeout=args.dl_timeout,
repair=args.repair_mode,
@@ -2754,6 +2755,8 @@ def main():
help='Download all files / ignore existing (overwrite)')
install_parser.add_argument('--disable-patching', dest='disable_patching', action='store_true',
help='Do not attempt to patch existing installation (download entire changed files)')
install_parser.add_argument('--read-files', dest='read_files', action='store_true',
help='Read duplicated parts from already saved files instead of keeping them in memory')
install_parser.add_argument('--download-only', '--no-install', dest='no_install', action='store_true',
help='Do not install app and do not run prerequisite installers after download')
install_parser.add_argument('--update-only', dest='update_only', action='store_true',
legendary/core.py: 18 additions & 6 deletions
@@ -1327,6 +1327,7 @@ def prepare_download(self, game: Game, base_game: Game = None, base_path: str =
override_old_manifest: str = '', override_base_url: str = '',
platform: str = 'Windows', file_prefix_filter: list = None,
file_exclude_filter: list = None, file_install_tag: list = None,
read_files: bool = False,
dl_optimizations: bool = False, dl_timeout: int = 10,
repair: bool = False, repair_use_latest: bool = False,
disable_delta: bool = False, override_delta_manifest: str = '',
@@ -1499,12 +1500,23 @@ def prepare_download(self, game: Game, base_game: Game = None, base_path: str =
dlm = DLManager(install_path, base_url, resume_file=resume_file, status_q=status_q,
max_shared_memory=max_shm * 1024 * 1024, max_workers=max_workers,
dl_timeout=dl_timeout, bind_ip=bind_ip)
anlres = dlm.run_analysis(manifest=new_manifest, old_manifest=old_manifest,
patch=not disable_patching, resume=not force,
file_prefix_filter=file_prefix_filter,
file_exclude_filter=file_exclude_filter,
file_install_tag=file_install_tag,
processing_optimization=process_opt)

analysis_kwargs = dict(
old_manifest=old_manifest,
patch=not disable_patching, resume=not force,
file_prefix_filter=file_prefix_filter,
file_exclude_filter=file_exclude_filter,
file_install_tag=file_install_tag,
processing_optimization=process_opt
)

try:
anlres = dlm.run_analysis(manifest=new_manifest, **analysis_kwargs, read_files=read_files)
except MemoryError:
if read_files:
raise
self.log.warning('Memory error encountered, retrying with file read enabled...')
anlres = dlm.run_analysis(manifest=new_manifest, **analysis_kwargs, read_files=True)

prereq = None
if new_manifest.meta.prereq_ids:
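The heart of the change is this try/except: analysis first runs with duplicated chunk parts kept in memory, and only falls back to re-reading them from already saved files when a MemoryError surfaces; if the caller already passed --read-files there is no cheaper mode left, so the error propagates. A minimal standalone sketch of the pattern (the wrapper name is hypothetical; dlm is assumed to be an already-constructed DLManager and kwargs the shared analysis arguments):

def run_analysis_with_fallback(dlm, manifest, read_files=False, **kwargs):
    try:
        # first attempt honours the caller's choice (RAM-backed by default)
        return dlm.run_analysis(manifest=manifest, read_files=read_files, **kwargs)
    except MemoryError:
        if read_files:
            # already reading from saved files, nothing left to fall back to
            raise
        # retry with duplicated chunk parts read back from disk instead of RAM
        return dlm.run_analysis(manifest=manifest, read_files=True, **kwargs)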
legendary/downloader/mp/manager.py: 22 additions & 18 deletions
@@ -82,6 +82,7 @@ def __init__(self, download_dir, base_url, cache_dir=None, status_q=None,
def run_analysis(self, manifest: Manifest, old_manifest: Manifest = None,
patch=True, resume=True, file_prefix_filter=None,
file_exclude_filter=None, file_install_tag=None,
read_files=False,
processing_optimization=False) -> AnalysisResult:
"""
Run analysis on manifest and old manifest (if not None) and return a result
@@ -94,6 +95,7 @@ def run_analysis(self, manifest: Manifest, old_manifest: Manifest = None,
:param file_prefix_filter: Only download files that start with this prefix
:param file_exclude_filter: Exclude files with this prefix from download
:param file_install_tag: Only install files with the specified tag
:param read_files: Allow reading from already finished files
:param processing_optimization: Attempt to optimize processing order and RAM usage
:return: AnalysisResult
"""
@@ -320,25 +322,27 @@

# determine whether a chunk part is currently in written files
reusable_written = defaultdict(dict)
cur_written_cps = defaultdict(list)
for cur_file in fmlist:
cur_file_cps = dict()
cur_file_offset = 0
for cp in cur_file.chunk_parts:
key = (cp.guid_num, cp.offset, cp.size)
for wr_file_name, wr_file_offset, wr_cp_offset, wr_cp_end_offset in cur_written_cps[cp.guid_num]:
# check if new chunk part is wholly contained in a written chunk part
cur_cp_end_offset = cp.offset + cp.size
if wr_cp_offset <= cp.offset and wr_cp_end_offset >= cur_cp_end_offset:
references[cp.guid_num] -= 1
reuse_offset = wr_file_offset + (cp.offset - wr_cp_offset)
reusable_written[cur_file.filename][key] = (wr_file_name, reuse_offset)
break
cur_file_cps[cp.guid_num] = (cur_file.filename, cur_file_offset, cp.offset, cp.offset + cp.size)
cur_file_offset += cp.size
if read_files:
self.log.debug('Analyzing manifest for re-usable chunks in saved files...')
cur_written_cps = defaultdict(list)
for cur_file in fmlist:
cur_file_cps = dict()
cur_file_offset = 0
for cp in cur_file.chunk_parts:
key = (cp.guid_num, cp.offset, cp.size)
for wr_file_name, wr_file_offset, wr_cp_offset, wr_cp_end_offset in cur_written_cps[cp.guid_num]:
# check if new chunk part is wholly contained in a written chunk part
cur_cp_end_offset = cp.offset + cp.size
if wr_cp_offset <= cp.offset and wr_cp_end_offset >= cur_cp_end_offset:
references[cp.guid_num] -= 1
reuse_offset = wr_file_offset + (cp.offset - wr_cp_offset)
reusable_written[cur_file.filename][key] = (wr_file_name, reuse_offset)
break
cur_file_cps[cp.guid_num] = (cur_file.filename, cur_file_offset, cp.offset, cp.offset + cp.size)
cur_file_offset += cp.size

for guid, value in cur_file_cps.items():
cur_written_cps[guid].append(value)
for guid, value in cur_file_cps.items():
cur_written_cps[guid].append(value)

last_cache_size = current_cache_size = 0
# set to determine whether a file is currently cached or not
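To make the containment test concrete: a new chunk part is reusable only if it lies wholly inside a chunk part that was already written out, and its on-disk location is the written part's file offset plus the distance between the two chunk offsets. A self-contained sketch with made-up numbers (the helper name is hypothetical):

def written_reuse_offset(wr_file_offset, wr_cp_offset, wr_cp_end_offset,
                         cp_offset, cp_size):
    # returns the file offset to re-read the part from, or None if it is
    # not wholly contained in the written chunk part
    cp_end_offset = cp_offset + cp_size
    if wr_cp_offset <= cp_offset and wr_cp_end_offset >= cp_end_offset:
        return wr_file_offset + (cp_offset - wr_cp_offset)
    return None

# chunk bytes [100, 400) were written at file offset 50, so bytes [150, 250)
# can be re-read starting at file offset 50 + (150 - 100) = 100
assert written_reuse_offset(50, 100, 400, 150, 100) == 100
# a part overhanging the written range ([350, 450)) cannot be reused
assert written_reuse_offset(50, 100, 400, 350, 100) is None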
