change supervisord to always start non-daemonized by default

Nick Sweeting 2024-09-24 22:22:03 -07:00
parent de2ba890ea
commit 5e4b78d9e0
4 changed files with 55 additions and 88 deletions
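
For context on what the flag flip means: supervisord normally forks into the background at startup, and passing daemonize=False keeps it attached to the launching process instead (supervisord's real -n / --nodaemon option). A minimal sketch of the assumed semantics (illustrative only, not the actual ArchiveBox implementation; the real code is start_new_supervisord_process() in queues/supervisor_util.py):

import subprocess

def start_supervisord(daemonize=False):
    # Hypothetical helper for illustration only.
    cmd = ['supervisord', '-c', 'supervisord.conf']
    if not daemonize:
        cmd.append('--nodaemon')  # real supervisord flag: stay in the foreground
    return subprocess.Popen(cmd)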

@@ -1098,34 +1098,34 @@ def get_data_locations(config: ConfigDict) -> ConfigValue:

 def get_dependency_info(config: ConfigDict) -> ConfigValue:
     return {
-        'PYTHON_BINARY': {
-            'path': bin_path(config['PYTHON_BINARY']),
-            'version': config['PYTHON_VERSION'],
-            'hash': bin_hash(config['PYTHON_BINARY']),
-            'enabled': True,
-            'is_valid': bool(config['PYTHON_VERSION']),
-        },
-        'SQLITE_BINARY': {
-            'path': bin_path(config['SQLITE_BINARY']),
-            'version': config['SQLITE_VERSION'],
-            'hash': bin_hash(config['SQLITE_BINARY']),
-            'enabled': True,
-            'is_valid': bool(config['SQLITE_VERSION']),
-        },
-        'DJANGO_BINARY': {
-            'path': bin_path(config['DJANGO_BINARY']),
-            'version': config['DJANGO_VERSION'],
-            'hash': bin_hash(config['DJANGO_BINARY']),
-            'enabled': True,
-            'is_valid': bool(config['DJANGO_VERSION']),
-        },
-        'ARCHIVEBOX_BINARY': {
-            'path': bin_path(config['ARCHIVEBOX_BINARY']),
-            'version': config['VERSION'],
-            'hash': bin_hash(config['ARCHIVEBOX_BINARY']),
-            'enabled': True,
-            'is_valid': True,
-        },
+        # 'PYTHON_BINARY': {
+        #     'path': bin_path(config['PYTHON_BINARY']),
+        #     'version': config['PYTHON_VERSION'],
+        #     'hash': bin_hash(config['PYTHON_BINARY']),
+        #     'enabled': True,
+        #     'is_valid': bool(config['PYTHON_VERSION']),
+        # },
+        # 'SQLITE_BINARY': {
+        #     'path': bin_path(config['SQLITE_BINARY']),
+        #     'version': config['SQLITE_VERSION'],
+        #     'hash': bin_hash(config['SQLITE_BINARY']),
+        #     'enabled': True,
+        #     'is_valid': bool(config['SQLITE_VERSION']),
+        # },
+        # 'DJANGO_BINARY': {
+        #     'path': bin_path(config['DJANGO_BINARY']),
+        #     'version': config['DJANGO_VERSION'],
+        #     'hash': bin_hash(config['DJANGO_BINARY']),
+        #     'enabled': True,
+        #     'is_valid': bool(config['DJANGO_VERSION']),
+        # },
+        # 'ARCHIVEBOX_BINARY': {
+        #     'path': bin_path(config['ARCHIVEBOX_BINARY']),
+        #     'version': config['VERSION'],
+        #     'hash': bin_hash(config['ARCHIVEBOX_BINARY']),
+        #     'enabled': True,
+        #     'is_valid': True,
+        # },
         'CURL_BINARY': {
             'path': bin_path(config['CURL_BINARY']),

@@ -1372,51 +1372,11 @@ def server(runserver_args: Optional[List[str]]=None,
     print(f' > Starting ArchiveBox webserver on http://{host}:{port}/')

-    from queues.supervisor_util import get_or_create_supervisord_process, start_worker, stop_worker, watch_worker
-    print()
-    supervisor = get_or_create_supervisord_process(daemonize=False)
-    bg_workers = [
-        {
-            "name": "worker_system_tasks",
-            "command": "archivebox manage djangohuey --queue system_tasks",
-            "autostart": "true",
-            "autorestart": "true",
-            "stdout_logfile": "logs/worker_system_tasks.log",
-            "redirect_stderr": "true",
-        },
-    ]
-    fg_worker = {
-        "name": "worker_daphne",
-        "command": f"daphne --bind={host} --port={port} --application-close-timeout=600 archivebox.core.asgi:application",
-        "autostart": "false",
-        "autorestart": "true",
-        "stdout_logfile": "logs/worker_daphne.log",
-        "redirect_stderr": "true",
-    }
-    print()
-    for worker in bg_workers:
-        start_worker(supervisor, worker)
-    print()
-    start_worker(supervisor, fg_worker)
-    print()
-    try:
-        watch_worker(supervisor, "worker_daphne")
-    except KeyboardInterrupt:
-        print("\n[🛑] Got Ctrl+C, stopping gracefully...")
-    except SystemExit:
-        pass
-    except BaseException as e:
-        print(f"\n[🛑] Got {e.__class__.__name__} exception, stopping web server gracefully...")
-        raise
-    finally:
-        stop_worker(supervisor, "worker_daphne")
-        time.sleep(0.5)
+    from queues.supervisor_util import start_server_workers
+    start_server_workers(host=host, port=port)
     print("\n[🟩] ArchiveBox server shut down gracefully.")

@@ -57,8 +57,7 @@ supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
     files = %(here)s/{WORKER_DIR.name}/*.conf
     """
-    with open(CONFIG_FILE, "w") as f:
-        f.write(config_content)
+    CONFIG_FILE.write_text(config_content)

 def create_worker_config(daemon):
     Path.mkdir(WORKER_DIR, exist_ok=True)

@@ -111,7 +110,7 @@ def stop_existing_supervisord_process():
     except FileNotFoundError:
         pass

-def start_new_supervisord_process(daemonize=True):
+def start_new_supervisord_process(daemonize=False):
     print(f"[🦸‍♂️] Supervisord starting{' in background' if daemonize else ''}...")
     # Create a config file in the current working directory
     create_supervisord_config()

@@ -142,7 +141,7 @@ def start_new_supervisord_process(daemonize=True):
         return get_existing_supervisord_process()

-def get_or_create_supervisord_process(daemonize=True):
+def get_or_create_supervisord_process(daemonize=False):
     supervisor = get_existing_supervisord_process()
     if supervisor is None:
         stop_existing_supervisord_process()

@@ -213,7 +212,7 @@ def watch_worker(supervisor, daemon_name, interval=5):
             continue

 def tail_worker_logs(log_path: str):
-    get_or_create_supervisord_process(daemonize=True)
+    get_or_create_supervisord_process(daemonize=False)

     from rich.live import Live
     from rich.table import Table

@@ -271,9 +270,17 @@ def start_server_workers(host='0.0.0.0', port='8000'):
     supervisor = get_or_create_supervisord_process(daemonize=False)
     bg_workers = [
+        {
+            "name": "worker_scheduler",
+            "command": "archivebox manage djangohuey --queue system_tasks -w 4 -k thread --disable-health-check --flush-locks",
+            "autostart": "true",
+            "autorestart": "true",
+            "stdout_logfile": "logs/worker_scheduler.log",
+            "redirect_stderr": "true",
+        },
         {
             "name": "worker_system_tasks",
-            "command": "archivebox manage djangohuey --queue system_tasks",
+            "command": "archivebox manage djangohuey --queue system_tasks -w 4 -k thread --no-periodic --disable-health-check",
             "autostart": "true",
             "autorestart": "true",
             "stdout_logfile": "logs/worker_system_tasks.log",

@@ -289,12 +296,11 @@
         "redirect_stderr": "true",
     }

     print()
-    start_worker(supervisor, fg_worker)
-    print()
     for worker in bg_workers:
         start_worker(supervisor, worker)
     print()
+    start_worker(supervisor, fg_worker)
+    print()

     try:

@@ -337,17 +343,18 @@ def start_cli_workers(watch=False):
         raise
     finally:
         stop_worker(supervisor, "worker_system_tasks")
+        stop_worker(supervisor, "worker_scheduler")
         time.sleep(0.5)

     return fg_worker

-def main(daemons):
-    supervisor = get_or_create_supervisord_process(daemonize=True)
+# def main(daemons):
+#     supervisor = get_or_create_supervisord_process(daemonize=False)

-    worker = start_worker(supervisor, daemons["webworker"])
-    pprint(worker)
+#     worker = start_worker(supervisor, daemons["webworker"])
+#     pprint(worker)

-    print("All processes started in background.")
+#     print("All processes started in background.")

     # Optionally you can block the main thread until an exit signal is received:
     # try:
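
The worker dicts passed to start_worker() mirror supervisord program options, i.e. the keys map onto a [program:name] section. The diff never shows the body of create_worker_config(), so the following renderer is purely an assumed sketch of how such a dict could become one of the %(here)s/{WORKER_DIR.name}/*.conf files included above:

from pathlib import Path

def render_worker_conf(worker: dict, worker_dir: Path) -> Path:
    # Assumed sketch only, not the actual create_worker_config():
    # turns a worker dict like the ones above into a supervisord
    # [program:...] config file inside the worker dir.
    name = worker['name']
    lines = [f'[program:{name}]']
    lines += [f'{key}={value}' for key, value in worker.items() if key != 'name']
    conf = worker_dir / f'{name}.conf'
    conf.write_text('\n'.join(lines) + '\n')
    return conf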

@@ -10,7 +10,7 @@ from .supervisor_util import get_or_create_supervisord_process

 @db_task(queue="system_tasks", context=True)
 def bg_add(add_kwargs, task=None, parent_task_id=None):
-    get_or_create_supervisord_process(daemonize=True)
+    get_or_create_supervisord_process(daemonize=False)

     from ..main import add

@@ -29,7 +29,7 @@ def bg_add(add_kwargs, task=None, parent_task_id=None):

 @task(queue="system_tasks", context=True)
 def bg_archive_links(args, kwargs=None, task=None, parent_task_id=None):
-    get_or_create_supervisord_process(daemonize=True)
+    get_or_create_supervisord_process(daemonize=False)

     from ..extractors import archive_links

@@ -50,7 +50,7 @@ def bg_archive_links(args, kwargs=None, task=None, parent_task_id=None):

 @task(queue="system_tasks", context=True)
 def bg_archive_link(args, kwargs=None, task=None, parent_task_id=None):
-    get_or_create_supervisord_process(daemonize=True)
+    get_or_create_supervisord_process(daemonize=False)

     from ..extractors import archive_link

@@ -71,7 +71,7 @@ def bg_archive_link(args, kwargs=None, task=None, parent_task_id=None):

 @task(queue="system_tasks", context=True)
 def bg_archive_snapshot(snapshot, overwrite=False, methods=None, task=None, parent_task_id=None):
-    # get_or_create_supervisord_process(daemonize=True)
+    # get_or_create_supervisord_process(daemonize=False)

     from ..extractors import archive_link
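
All four hunks apply the same pattern: each huey task first makes sure a supervisord instance is up (now non-daemonized by default) and only then imports the heavy archiving code. Sketched in isolation below; the decorator import is an assumption (the diff shows only the @task usage), and bg_example is a hypothetical name:

from django_huey import task  # assumed import; the diff shows only the decorator

from .supervisor_util import get_or_create_supervisord_process

@task(queue="system_tasks", context=True)
def bg_example(args, kwargs=None, task=None, parent_task_id=None):
    get_or_create_supervisord_process(daemonize=False)  # idempotent bootstrap
    from ..extractors import archive_links  # heavy import deferred until needed
    return archive_links(args, **(kwargs or {}))  # illustrative call shape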