Mirror of https://github.com/ArchiveBox/ArchiveBox.git (synced 2025-05-13 06:34:25 -04:00)

commit ad675a8e7c (parent 2f68a1d476)

    properly handle chowning DATA_DIR on init when using sudo

7 changed files with 46 additions and 19 deletions
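
For orientation, the sketch below restates the core pattern this commit applies, as standalone Python rather than the project's actual helpers: when `archivebox init` runs under sudo (i.e. as root), ownership of the data directory and everything inside it is handed back to the unprivileged archivebox user so later non-root runs can still read and write the collection. The user/group names, environment-variable fallbacks, and DATA_DIR path are illustrative assumptions, and shutil.chown is used here in place of the shell `chown` calls seen in the diff.

    # Hypothetical standalone sketch of the chown-on-init pattern (not ArchiveBox code).
    import os
    import shutil
    from pathlib import Path

    DATA_DIR = Path("./data")                                          # assumed collection dir
    SERVICE_USER = os.environ.get("ARCHIVEBOX_USER", "archivebox")     # assumed unprivileged user
    SERVICE_GROUP = os.environ.get("ARCHIVEBOX_GROUP", "archivebox")   # assumed group

    def ensure_data_dir_ownership() -> None:
        """If running as root, hand DATA_DIR (and its contents) back to the service user."""
        if os.geteuid() != 0:
            return  # already unprivileged, nothing to fix up
        for path in [DATA_DIR, *DATA_DIR.rglob("*")]:
            try:
                shutil.chown(path, user=SERVICE_USER, group=SERVICE_GROUP)
            except (LookupError, PermissionError, FileNotFoundError):
                pass  # best effort: skip entries that cannot be chowned

    if __name__ == "__main__":
        DATA_DIR.mkdir(exist_ok=True)
        ensure_data_dir_ownership()
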
@@ -571,6 +571,18 @@ def setup_django(out_dir: Path | None=None, check_db=False, config: benedict=CON
     assert isinstance(output_dir, Path) and isinstance(CONSTANTS.PACKAGE_DIR, Path)
+    from archivebox.config.permissions import IS_ROOT, ARCHIVEBOX_USER, ARCHIVEBOX_GROUP, SudoPermission
+    from archivebox.config.paths import _get_collection_id
+
+    # if running as root, chown the data dir to the archivebox user to make sure it's accessible to the archivebox user
+    if IS_ROOT:
+        with SudoPermission(uid=0):
+            os.system(f'chown {ARCHIVEBOX_USER}:{ARCHIVEBOX_GROUP} "{CONSTANTS.DATA_DIR}"')
+    _get_collection_id(DATA_DIR=CONSTANTS.DATA_DIR, force_create=True)
+    if IS_ROOT:
+        with SudoPermission(uid=0):
+            os.system(f'chown {ARCHIVEBOX_USER}:{ARCHIVEBOX_GROUP} "{CONSTANTS.DATA_DIR}"/*')
+
     bump_startup_progress_bar()

     try:
         from django.core.management import call_command

@@ -596,7 +608,7 @@ def setup_django(out_dir: Path | None=None, check_db=False, config: benedict=CON
     except Exception as e:
         bump_startup_progress_bar(advance=1000)

-        is_using_meta_cmd = any(ignored_subcommand in sys.argv for ignored_subcommand in ('help', 'version', '--help', '--version'))
+        is_using_meta_cmd = any(ignored_subcommand in sys.argv for ignored_subcommand in ('help', 'version', '--help', '--version', 'init'))
         if not is_using_meta_cmd:
             # show error message to user only if they're not running a meta command / just trying to get help
             STDERR.print()

@@ -21,9 +21,7 @@ DATABASE_FILE = DATA_DIR / 'index.sqlite3'

 #############################################################################################

-@cache
-def get_collection_id(DATA_DIR=DATA_DIR) -> str:
-    """Get a short, stable, unique ID for the current collection (e.g. abc45678)"""
+def _get_collection_id(DATA_DIR=DATA_DIR, force_create=False) -> str:
     collection_id_file = DATA_DIR / '.archivebox_id'

     try:

@@ -43,7 +41,7 @@ def get_collection_id(DATA_DIR=DATA_DIR) -> str:
     try:
         # only persist collection_id file if we already have an index.sqlite3 file present
         # otherwise we might be running in a directory that is not a collection, no point creating cruft files
-        if os.path.isfile(DATABASE_FILE) and os.access(DATA_DIR, os.W_OK):
+        if os.path.isfile(DATABASE_FILE) and os.access(DATA_DIR, os.W_OK) or force_create:
            collection_id_file.write_text(collection_id)

            # if we're running as root right now, make sure the collection_id file is owned by the archivebox user

@@ -57,6 +55,11 @@ def get_collection_id(DATA_DIR=DATA_DIR) -> str:
         pass
     return collection_id

+@cache
+def get_collection_id(DATA_DIR=DATA_DIR) -> str:
+    """Get a short, stable, unique ID for the current collection (e.g. abc45678)"""
+    return _get_collection_id(DATA_DIR=DATA_DIR)
+
 @cache
 def get_machine_id() -> str:
     """Get a short, stable, unique ID for the current machine (e.g. abc45678)"""
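
Reading the hunks above together: `get_collection_id()` keeps its `@cache` decorator as a thin public wrapper, while the renamed `_get_collection_id(..., force_create=True)` is uncached so `init` can force the `.archivebox_id` file to be written even before `index.sqlite3` exists. A minimal sketch of that cached-wrapper split, with an assumed ID scheme and simplified error handling (an illustration, not the module's real contents):

    # Hypothetical sketch of a cached public wrapper over an uncached internal helper.
    import uuid
    from functools import cache
    from pathlib import Path

    DATA_DIR = Path(".")  # assumed collection directory

    def _get_collection_id(DATA_DIR=DATA_DIR, force_create=False) -> str:
        """Uncached: safe to call with force_create=True during init."""
        id_file = DATA_DIR / ".archivebox_id"
        if id_file.is_file():
            return id_file.read_text().strip()
        collection_id = uuid.uuid4().hex[:8]          # assumed ID scheme, for illustration only
        # normally only persisted when index.sqlite3 already exists;
        # force_create=True (as used by init) writes it unconditionally
        if force_create or (DATA_DIR / "index.sqlite3").is_file():
            id_file.write_text(collection_id)
        return collection_id

    @cache
    def get_collection_id(DATA_DIR=DATA_DIR) -> str:
        """Cached public entrypoint used by normal callers."""
        return _get_collection_id(DATA_DIR=DATA_DIR)
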
@@ -325,15 +325,24 @@ def init(force: bool=False, quick: bool=False, install: bool=False, out_dir: Pat
     from core.models import Snapshot
     from rich import print

-    out_dir.mkdir(exist_ok=True)
+    from archivebox.config.permissions import IS_ROOT, ARCHIVEBOX_USER, ARCHIVEBOX_GROUP
+    from archivebox.config.paths import _get_collection_id
+
+    # if running as root, chown the data dir to the archivebox user to make sure it's accessible to the archivebox user
+    if IS_ROOT:
+        with SudoPermission(uid=0):
+            os.system(f'chown {ARCHIVEBOX_USER}:{ARCHIVEBOX_GROUP} "{CONSTANTS.DATA_DIR}"')
+    _get_collection_id()
+    if IS_ROOT:
+        with SudoPermission(uid=0):
+            os.system(f'chown {ARCHIVEBOX_USER}:{ARCHIVEBOX_GROUP} "{CONSTANTS.DATA_DIR}"/*')
+
+    # if os.access(out_dir / CONSTANTS.JSON_INDEX_FILENAME, os.F_OK):
+    #     print("[red]:warning: This folder contains a JSON index. It is deprecated, and will no longer be kept up to date automatically.[/red]", file=sys.stderr)
+    #     print("[red]    You can run `archivebox list --json --with-headers > static_index.json` to manually generate it.[/red]", file=sys.stderr)
+
     is_empty = not len(set(os.listdir(out_dir)) - CONSTANTS.ALLOWED_IN_DATA_DIR)
-    if os.access(out_dir / CONSTANTS.JSON_INDEX_FILENAME, os.F_OK):
-        print("[red]:warning: This folder contains a JSON index. It is deprecated, and will no longer be kept up to date automatically.[/red]", file=sys.stderr)
-        print("[red]    You can run `archivebox list --json --with-headers > static_index.json` to manually generate it.[/red]", file=sys.stderr)
-
-    existing_index = os.access(CONSTANTS.DATABASE_FILE, os.F_OK)
+    existing_index = os.path.isfile(CONSTANTS.DATABASE_FILE)

     if is_empty and not existing_index:
         print(f'[turquoise4][+] Initializing a new ArchiveBox v{VERSION} collection...[/turquoise4]')
         print('[green]----------------------------------------------------------------------[/green]')

@@ -376,7 +385,7 @@ def init(force: bool=False, quick: bool=False, install: bool=False, out_dir: Pat
     for migration_line in apply_migrations(out_dir):
         sys.stdout.write(f'    {migration_line}\n')

-    assert os.access(CONSTANTS.DATABASE_FILE, os.R_OK)
+    assert os.path.isfile(CONSTANTS.DATABASE_FILE) and os.access(CONSTANTS.DATABASE_FILE, os.R_OK)
     print()
     print(f'    √ ./{CONSTANTS.DATABASE_FILE.relative_to(DATA_DIR)}')

@@ -19,13 +19,16 @@ from .settings import LDAP_CONFIG, get_ldap_lib

###################### Config ##########################

-def get_LDAP_LIB_path(paths):
+def get_LDAP_LIB_path(paths=()):
    LDAP_LIB = get_ldap_lib()[0]
    if not LDAP_LIB:
        return None

    # check that LDAP_LIB path is in one of the specified site packages dirs
    lib_path = Path(inspect.getfile(LDAP_LIB))
+    if not paths:
+        return lib_path
+
    for site_packges_dir in paths:
        if str(lib_path.parent.parent.resolve()) == str(Path(site_packges_dir).resolve()):
            return lib_path

@@ -57,7 +60,7 @@ class LdapBinary(BaseBinary):
            "packages": lambda: ['python-ldap>=3.4.3', 'django-auth-ldap>=4.1.0'],
        },
        apt.name: {
-            "abspath": lambda: get_LDAP_LIB_path((*USER_SITE_PACKAGES, *SYS_SITE_PACKAGES)),
+            "abspath": lambda: get_LDAP_LIB_path(),
            "version": lambda: get_LDAP_LIB_version(),
            "packages": lambda: ['libssl-dev', 'libldap2-dev', 'libsasl2-dev', 'python3-ldap', 'python3-msgpack', 'python3-mutagen'],
        },
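
The `get_LDAP_LIB_path()` change above makes the site-packages restriction optional: with no paths given, the apt provider simply reports wherever the importable LDAP module lives. Below is a small standalone sketch of that lookup (my illustration, not the plugin's code); `find_module_path` and the `json` demo module are assumptions:

    # Hypothetical sketch: locate an importable module on disk, optionally requiring
    # that it live under one of the given site-packages directories.
    import inspect
    from pathlib import Path

    def find_module_path(module, site_packages_dirs=()):
        lib_path = Path(inspect.getfile(module))
        if not site_packages_dirs:
            return lib_path  # no restriction requested (mirrors the new default)
        for site_dir in site_packages_dirs:
            # <site-packages>/<package>/<module>.py -> compare the grandparent directory
            if lib_path.parent.parent.resolve() == Path(site_dir).resolve():
                return lib_path
        return None

    if __name__ == "__main__":
        import json  # any importable stdlib package works as a demo target
        print(find_module_path(json))
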
archivebox/vendor/pydantic-pkgr (vendored, 2 lines changed)

@@ -1 +1 @@
-Subproject commit ec4c2d5f5a034ea6c10a5337c3115fbe1504f52b
+Subproject commit e2f6b10550f41e64817908eef3feb0aa33071969

@@ -1,6 +1,6 @@
 [project]
 name = "archivebox"
-version = "0.8.5rc28"
+version = "0.8.5rc31"
 requires-python = ">=3.10"
 description = "Self-hosted internet archiving solution."
 authors = [{name = "Nick Sweeting", email = "pyproject.toml@archivebox.io"}]

uv.lock (generated, 2 lines changed)

@@ -41,7 +41,7 @@ wheels = [

 [[package]]
 name = "archivebox"
-version = "0.8.5rc28"
+version = "0.8.5rc31"
 source = { editable = "." }
 dependencies = [
     { name = "atomicwrites" },