From 3dacec3f5b73ab777bc0c65bb38308f5865b65fe Mon Sep 17 00:00:00 2001
From: Nick Sweeting
Date: Tue, 24 Sep 2024 22:01:18 -0700
Subject: [PATCH] prevent redundant supervisord starts

---
 archivebox/main.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/archivebox/main.py b/archivebox/main.py
index 72130d67..3bc07532 100755
--- a/archivebox/main.py
+++ b/archivebox/main.py
@@ -598,8 +598,8 @@ def add(urls: Union[str, List[str]],
     """Add a new URL or list of URLs to your archive"""
 
     from core.models import Snapshot, Tag
-    from queues.supervisor_util import start_cli_workers, tail_worker_logs
-    from queues.tasks import bg_archive_link
+    # from queues.supervisor_util import start_cli_workers, tail_worker_logs
+    # from queues.tasks import bg_archive_link
 
     assert depth in (0, 1), 'Depth must be 0 or 1 (depth >1 is not supported yet)'
 
@@ -612,7 +612,7 @@ def add(urls: Union[str, List[str]],
     # Load list of links from the existing index
     check_data_folder(CONFIG)
     check_dependencies(CONFIG)
-    worker = start_cli_workers()
+    # worker = start_cli_workers()
 
     new_links: List[Link] = []
     all_links = load_main_index(out_dir=out_dir)
@@ -691,7 +691,7 @@ def add(urls: Union[str, List[str]],
         stderr(f'[*] [{ts}] Archiving {len(new_links)}/{len(all_links)} URLs from added set...', color='green')
         archive_links(new_links, overwrite=False, **archive_kwargs)
 
-    tail_worker_logs(worker['stdout_logfile'])
+    # tail_worker_logs(worker['stdout_logfile'])
 
     if CAN_UPGRADE:
         hint(f"There's a new version of ArchiveBox available! Your current version is {VERSION}. You can upgrade to {VERSIONS_AVAILABLE['recommended_version']['tag_name']} ({VERSIONS_AVAILABLE['recommended_version']['html_url']}). For more on how to upgrade: https://github.com/ArchiveBox/ArchiveBox/wiki/Upgrading-or-Merging-Archives\n")
@@ -796,12 +796,12 @@ def update(resume: Optional[float]=None,
     from core.models import ArchiveResult
     from .search import index_links
-    from .queues.supervisor_util import start_cli_workers
+    # from .queues.supervisor_util import start_cli_workers
 
     check_data_folder(CONFIG)
     check_dependencies(CONFIG)
-    start_cli_workers()
+    # start_cli_workers()
 
     new_links: List[Link] = []  # TODO: Remove input argument: only_new
     extractors = extractors.split(",") if extractors else []
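
Note: the patch above simply comments out the CLI-side worker startup (start_cli_workers / tail_worker_logs) in `add` and `update` so those commands no longer spawn their own supervisord instance on top of an already-running one. As a point of comparison only, the following is a minimal sketch of an idempotent start guard based on a pidfile check. It is not ArchiveBox code; the pidfile path and helper names are assumptions made for illustration.

# Hypothetical sketch (not part of ArchiveBox): skip starting workers when a
# supervisord instance already appears to be running, based on its pidfile.
import os
from pathlib import Path

SUPERVISORD_PIDFILE = Path("data/logs/supervisord.pid")  # assumed location, illustration only

def supervisord_already_running(pidfile: Path = SUPERVISORD_PIDFILE) -> bool:
    """Return True if the pidfile names a process that is still alive."""
    try:
        pid = int(pidfile.read_text().strip())
    except (FileNotFoundError, ValueError):
        return False
    try:
        os.kill(pid, 0)  # signal 0 only checks for the process's existence
    except ProcessLookupError:
        return False
    except PermissionError:
        return True  # process exists but belongs to another user
    return True

def maybe_start_workers(start_fn) -> None:
    """Call start_fn() only when no supervisord instance seems to be running."""
    if not supervisord_already_running():
        start_fn()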