ArchiveBox (mirror of https://github.com/ArchiveBox/ArchiveBox.git)

nicer version and help pretty printing with rich

commit 697d0a3566 (parent 161afc7297)
2 changed files with 33 additions and 34 deletions

@@ -518,16 +518,15 @@ def pretty_path(path: Union[Path, str], pwd: Union[Path, str]=DATA_DIR) -> str:
         return path

     # replace long absolute paths with ./ relative ones to save on terminal output width
-    if path.startswith(pwd) and (pwd != '/'):
-        path = path.replace(pwd, '.', 1)
+    if path.startswith(pwd) and (pwd != '/') and path != pwd:
+        path = path.replace(pwd, '[light_slate_blue].[/light_slate_blue]', 1)

     # quote paths containing spaces
     if ' ' in path:
         path = f'"{path}"'

-    # if path is just a plain dot, replace it back with the absolute path for clarity
-    if path == '.':
-        path = pwd
+    # replace home directory with ~ for shorter output
+    path = path.replace(str(Path('~').expanduser()), '~')


     return path
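
The relative-path and home-directory substitutions above now embed Rich markup directly in the returned string, so the result is meant to be rendered by a Rich console rather than the builtin print(). A minimal sketch of how the new output renders, using hypothetical paths:

from pathlib import Path
from rich.console import Console

console = Console()

# hypothetical values, for illustration only
pwd = '/home/squash/data'
path = '/home/squash/data/archive/1234/index.html'

if path.startswith(pwd) and (pwd != '/') and path != pwd:
    path = path.replace(pwd, '[light_slate_blue].[/light_slate_blue]', 1)
path = path.replace(str(Path('~').expanduser()), '~')

# the color tags are not printed; the leading dot just renders in color
console.print(path)   # -> ./archive/1234/index.html
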
@@ -591,8 +590,8 @@ def printable_folder_status(name: str, folder: Dict) -> str:
         f'[{color}]',
         symbol,
         '[/]',
-        name.ljust(21),
-        num_files.ljust(14),
+        name.ljust(21).replace('DATA_DIR', '[light_slate_blue]DATA_DIR[/light_slate_blue]'),
+        num_files.ljust(14).replace('missing', '[grey53]missing[/grey53]'),
         f'[{color}]',
         note.ljust(8),
         '[/]',

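The markup is substituted after ljust(), so the padding is computed on the plain text and the visible column width stays fixed; Rich strips the tags when rendering and only changes the color. A small sketch with an assumed Console:

from rich.console import Console

console = Console()

name = 'DATA_DIR'
num_files = 'missing'

# pad first, then inject markup: the tags add characters to the string
# but not to the rendered width, so the columns still line up
console.print(
    name.ljust(21).replace('DATA_DIR', '[light_slate_blue]DATA_DIR[/light_slate_blue]'),
    num_files.ljust(14).replace('missing', '[grey53]missing[/grey53]'),
)
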
@@ -180,13 +180,13 @@ def version(quiet: bool=False,
             out_dir: Path=DATA_DIR) -> None:
     """Print the ArchiveBox version and dependency information"""

-    from rich.console import Console
-    console = Console()
-    print = console.print
     print(VERSION)
-    if quiet:
+    if quiet or '--version' in sys.argv:
         return

-
+    from rich.console import Console
+    console = Console()
+    prnt = console.print
+
     from plugins_auth.ldap.apps import LDAP_CONFIG
     from django.conf import settings
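
Two things change here: the Rich console is no longer bound to the name print (which shadowed the builtin), and archivebox --version now returns right after printing the bare version string, before any Rich setup. A minimal sketch of the pattern, with a placeholder VERSION:

import sys
from rich.console import Console

VERSION = '0.8.0'   # placeholder value, for illustration only

print(VERSION)                       # builtin print: bare version string, no markup
if '--version' in sys.argv:
    raise SystemExit(0)              # the real function simply does `return` here

console = Console()
prnt = console.print                 # rich-aware printer, without shadowing print()
prnt('[dark_green]ArchiveBox[/dark_green] [dark_goldenrod]v{}[/dark_goldenrod]'.format(VERSION))
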
@@ -198,12 +198,12 @@ def version(quiet: bool=False,
     # DEBUG=False IS_TTY=True TZ=UTC SEARCH_BACKEND=ripgrep LDAP=False

     p = platform.uname()
-    print(
-        'ArchiveBox v{}'.format(CONSTANTS.VERSION),
+    prnt(
+        '[dark_green]ArchiveBox[/dark_green] [dark_goldenrod]v{}[/dark_goldenrod]'.format(CONSTANTS.VERSION),
         f'COMMIT_HASH={SHELL_CONFIG.COMMIT_HASH[:7] if SHELL_CONFIG.COMMIT_HASH else "unknown"}',
         f'BUILD_TIME={SHELL_CONFIG.BUILD_TIME}',
     )
-    print(
+    prnt(
         f'IN_DOCKER={SHELL_CONFIG.IN_DOCKER}',
         f'IN_QEMU={SHELL_CONFIG.IN_QEMU}',
         f'ARCH={p.machine}',
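
Console.print accepts multiple positional arguments and joins them with spaces, just like the builtin print, so switching the call to prnt only changes how the strings render: the [dark_green] and [dark_goldenrod] tags become colors instead of literal text. A quick sketch with made-up values:

from rich.console import Console

console = Console()
prnt = console.print

# made-up values standing in for CONSTANTS / SHELL_CONFIG
version, commit_hash, build_time = '0.8.0', '697d0a3', '2024-01-01 00:00:00'

prnt(
    '[dark_green]ArchiveBox[/dark_green] [dark_goldenrod]v{}[/dark_goldenrod]'.format(version),
    f'COMMIT_HASH={commit_hash}',
    f'BUILD_TIME={build_time}',
)
# -> ArchiveBox v0.8.0 COMMIT_HASH=697d0a3 BUILD_TIME=2024-01-01 00:00:00  (with colors)
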
@@ -212,13 +212,13 @@ def version(quiet: bool=False,
         f'PYTHON={sys.implementation.name.title()}',
     )
     OUTPUT_IS_REMOTE_FS = CONSTANTS.DATA_LOCATIONS.DATA_DIR.is_mount or CONSTANTS.DATA_LOCATIONS.ARCHIVE_DIR.is_mount
-    print(
+    prnt(
         f'FS_ATOMIC={STORAGE_CONFIG.ENFORCE_ATOMIC_WRITES}',
         f'FS_REMOTE={OUTPUT_IS_REMOTE_FS}',
         f'FS_USER={SHELL_CONFIG.PUID}:{SHELL_CONFIG.PGID}',
         f'FS_PERMS={STORAGE_CONFIG.OUTPUT_PERMISSIONS}',
     )
-    print(
+    prnt(
         f'DEBUG={SHELL_CONFIG.DEBUG}',
         f'IS_TTY={SHELL_CONFIG.IS_TTY}',
         f'TZ={CONSTANTS.TIMEZONE}',
@@ -226,10 +226,9 @@ def version(quiet: bool=False,
         f'LDAP={LDAP_CONFIG.LDAP_ENABLED}',
         #f'DB=django.db.backends.sqlite3 (({CONFIG["SQLITE_JOURNAL_MODE"]})', # add this if we have more useful info to show eventually
     )
-    print()
+    prnt()

-    print()
-    print('[pale_green1][i] Dependency versions:[/pale_green1]')
+    prnt('[pale_green1][i] Dependency versions:[/pale_green1]')
     for name, binary in reversed(list(settings.BINARIES.items())):
         if binary.name == 'archivebox':
             continue
@@ -240,31 +239,30 @@ def version(quiet: bool=False,
         except Exception as e:
             err = e
             loaded_bin = binary
-            raise
         provider_summary = f'[dark_sea_green3]{loaded_bin.binprovider.name.ljust(10)}[/dark_sea_green3]' if loaded_bin.binprovider else '[grey23]not found[/grey23]'
         if loaded_bin.abspath:
-            abspath = str(loaded_bin.abspath).replace(str(Path('~').expanduser()), '~')
+            abspath = str(loaded_bin.abspath).replace(str(DATA_DIR), '[light_slate_blue].[/light_slate_blue]').replace(str(Path('~').expanduser()), '~')
             if ' ' in abspath:
                 abspath = abspath.replace(' ', r'\ ')
         else:
             abspath = f'[red]{err}[/red]'
-        print('', '[green]√[/green]' if loaded_bin.is_valid else '[red]X[/red]', '', loaded_bin.name.ljust(21), str(loaded_bin.version).ljust(12), provider_summary, abspath, overflow='ignore', crop=False)
+        prnt('', '[green]√[/green]' if loaded_bin.is_valid else '[red]X[/red]', '', loaded_bin.name.ljust(21), str(loaded_bin.version).ljust(12), provider_summary, abspath, overflow='ignore', crop=False)

-    print()
-    print('[deep_sky_blue3][i] Source-code locations:[/deep_sky_blue3]')
+    prnt()
+    prnt('[deep_sky_blue3][i] Source-code locations:[/deep_sky_blue3]')
     for name, path in CONSTANTS.CODE_LOCATIONS.items():
-        print(printable_folder_status(name, path), overflow='ignore', crop=False)
+        prnt(printable_folder_status(name, path), overflow='ignore', crop=False)

-    print()
+    prnt()
     if CONSTANTS.DATABASE_FILE.exists() or CONSTANTS.ARCHIVE_DIR.exists() or CONSTANTS.CONFIG_FILE.exists():
-        print('[bright_yellow][i] Data locations:[/bright_yellow]')
+        prnt('[bright_yellow][i] Data locations:[/bright_yellow]')
         for name, path in CONSTANTS.DATA_LOCATIONS.items():
-            print(printable_folder_status(name, path), overflow='ignore', crop=False)
+            prnt(printable_folder_status(name, path), overflow='ignore', crop=False)
     else:
-        print()
-        print('[red][i] Data locations:[/red] (not in a data directory)')
+        prnt()
+        prnt('[red][i] Data locations:[/red] (not in a data directory)')

-    print()
+    prnt()


 @enforce_types
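
overflow='ignore' and crop=False are Console.print options that keep Rich from wrapping or truncating lines wider than the terminal, which matters here because dependency rows and folder-status rows are meant to stay on a single line. A small sketch with a hypothetical long path:

from rich.console import Console

console = Console()
prnt = console.print

# hypothetical row wider than most terminals
long_path = '/data/lib/singlefile/node_modules/.bin/single-file-' + 'x' * 120

# without these options Rich would wrap (or crop) the line to the terminal width;
# with them the row is emitted as-is and the terminal handles the overflow
prnt('', '[green]√[/green]', '', 'singlefile'.ljust(21), long_path, overflow='ignore', crop=False)
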
@@ -959,8 +957,10 @@ def install(out_dir: Path=DATA_DIR) -> None:
     stderr('\n[+] Installing ArchiveBox dependencies automatically...', color='green')

     for binary in reversed(list(settings.BINARIES.values())):
+        providers = ' [grey53]or[/grey53] '.join(provider.name for provider in binary.binproviders_supported)
+        print(f'[+] Locating / Installing [yellow]{binary.name}[/yellow] using [red]{providers}[/red]...')
         try:
-            print(binary.load_or_install().model_dump(exclude={'binproviders_supported', 'loaded_binprovider', 'provider_overrides', 'loaded_abspaths', 'bin_dir', 'loaded_respath'}))
+            print(binary.load_or_install().model_dump(exclude={'binproviders_supported', 'loaded_binprovider', 'provider_overrides', 'loaded_abspaths', 'bin_dir', 'loaded_respath', 'hook_type'}))
         except Exception as e:
             print(f'[X] Failed to install {binary.name}: {e}')

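The new progress line joins the names of every provider that could supply the binary into one Rich-markup string, so the user sees what will be tried before load_or_install() runs. A sketch of what that join produces, with hypothetical provider names:

from rich.console import Console

console = Console()

# hypothetical provider names standing in for binary.binproviders_supported
provider_names = ['env', 'pip', 'apt', 'brew']

providers = ' [grey53]or[/grey53] '.join(provider_names)
# -> 'env [grey53]or[/grey53] pip [grey53]or[/grey53] apt [grey53]or[/grey53] brew'

console.print(f'[+] Locating / Installing [yellow]wget[/yellow] using [red]{providers}[/red]...')
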
@@ -977,7 +977,7 @@ def install(out_dir: Path=DATA_DIR) -> None:

     from plugins_pkg.pip.apps import ARCHIVEBOX_BINARY

-    run_shell([ARCHIVEBOX_BINARY.load().abspath, '--version'], capture_output=False, cwd=out_dir)
+    run_shell([ARCHIVEBOX_BINARY.load().abspath, 'version'], capture_output=False, cwd=out_dir)

 # backwards-compatibility:
 setup = install
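
Because version() now returns early when '--version' is in sys.argv, the flag form only echoes the bare version number; re-running the CLI with the version subcommand instead gives install() the full Rich report of dependencies and data locations at the end of setup. A hypothetical comparison of the two invocations, assuming archivebox is on PATH:

import subprocess

subprocess.run(['archivebox', '--version'])  # bare version string only (early-return path)
subprocess.run(['archivebox', 'version'])    # full report: dependencies, source-code and data locations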