Mirror of https://github.com/ArchiveBox/ArchiveBox.git (synced 2025-05-17 00:24:26 -04:00)
add BUILD_TIME to archivebox version output
commit 8a306dbf6f (parent 951bba52a0)
2 changed files with 34 additions and 15 deletions
@@ -391,12 +391,20 @@ def get_version(config):
     raise Exception('Failed to detect installed archivebox version!')
 
-def get_commit_hash(config):
+def get_commit_hash(config) -> Optional[str]:
     try:
         return list((config['PACKAGE_DIR'] / '../.git/refs/heads/').glob('*'))[0].read_text().strip()
     except Exception:
         return None
 
+def get_build_time(config) -> str:
+    if config['IN_DOCKER']:
+        docker_build_end_time = Path('/VERSION.txt').read_text().rsplit('BUILD_END_TIME=')[-1].split('\n', 1)[0]
+        return docker_build_end_time
+
+    src_last_modified_unix_timestamp = (config['PACKAGE_DIR'] / 'config.py').stat().st_mtime
+    return datetime.fromtimestamp(src_last_modified_unix_timestamp).strftime('%Y-%m-%d %H:%M:%S %s')
+
 ############################## Derived Config ##################################
 
 
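The Docker branch of the new get_build_time() relies on a BUILD_END_TIME= line being baked into /VERSION.txt at image build time; the non-Docker branch falls back to the mtime of config.py, formatted with '%s' (a libc extension that renders Unix epoch seconds, which is what produces the trailing epoch number in the updated example-output comment later in this diff). Below is a minimal standalone sketch of the same parsing and formatting logic, using a hypothetical VERSION.txt body written to a temporary file rather than the real image file:

# Minimal sketch of the BUILD_TIME logic above (hypothetical inputs, not ArchiveBox's own code).
from datetime import datetime
from pathlib import Path
import tempfile

# Hypothetical /VERSION.txt contents, as a Docker image build might bake them in.
fake_version_txt = (
    "GIT_SHA=8a306dbf6f\n"
    "BUILD_START_TIME=2023-12-17 16:45:12\n"
    "BUILD_END_TIME=2023-12-17 16:46:05 1702860365\n"
)

with tempfile.TemporaryDirectory() as tmp:
    version_file = Path(tmp) / 'VERSION.txt'
    version_file.write_text(fake_version_txt)

    # Same parsing as the IN_DOCKER branch: everything after the last
    # 'BUILD_END_TIME=' marker, up to the end of that line.
    build_end_time = version_file.read_text().rsplit('BUILD_END_TIME=')[-1].split('\n', 1)[0]
    print(build_end_time)   # -> 2023-12-17 16:46:05 1702860365

    # Same formatting as the non-Docker fallback: file mtime rendered as
    # 'YYYY-MM-DD HH:MM:SS <epoch>'. Note that '%s' (lowercase) is a libc
    # extension and is not portable to Windows.
    mtime = version_file.stat().st_mtime
    print(datetime.fromtimestamp(mtime).strftime('%Y-%m-%d %H:%M:%S %s'))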
@@ -425,6 +433,7 @@ DYNAMIC_CONFIG_SCHEMA: ConfigDefaultDict = {
     'ARCHIVEBOX_BINARY': {'default': lambda c: sys.argv[0] or bin_path('archivebox')},
     'VERSION': {'default': lambda c: get_version(c)},
     'COMMIT_HASH': {'default': lambda c: get_commit_hash(c)},
+    'BUILD_TIME': {'default': lambda c: get_build_time(c)},
 
     'PYTHON_BINARY': {'default': lambda c: sys.executable},
     'PYTHON_ENCODING': {'default': lambda c: sys.stdout.encoding.upper()},
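The new 'BUILD_TIME' entry follows the same pattern as VERSION and COMMIT_HASH: its default is a lambda that receives the config dict resolved so far, which is how get_build_time(c) can read c['IN_DOCKER'] and c['PACKAGE_DIR'] computed by earlier entries. The following is only a rough, illustrative sketch of how such lambda defaults can be resolved in declaration order; it is not ArchiveBox's actual config loader, which additionally layers user overrides and environment variables on top of these defaults:

# Illustrative resolver for a schema of lambda defaults (demo values, not ArchiveBox's loader).
from typing import Any, Callable, Dict

DEMO_SCHEMA: Dict[str, Dict[str, Any]] = {
    'IN_DOCKER':  {'default': lambda c: False},
    'VERSION':    {'default': lambda c: '0.7.1'},
    'BUILD_TIME': {'default': lambda c: 'from /VERSION.txt' if c['IN_DOCKER'] else '2023-12-17 16:46:05'},
}

def resolve(schema: Dict[str, Dict[str, Any]]) -> Dict[str, Any]:
    config: Dict[str, Any] = {}
    # Resolve in declaration order so later lambdas can read earlier keys,
    # e.g. BUILD_TIME's default can depend on IN_DOCKER.
    for key, options in schema.items():
        default = options['default']
        config[key] = default(config) if callable(default) else default
    return config

print(resolve(DEMO_SCHEMA))
# {'IN_DOCKER': False, 'VERSION': '0.7.1', 'BUILD_TIME': '2023-12-17 16:46:05'}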
@@ -93,11 +93,13 @@ from .config import (
     SQL_INDEX_FILENAME,
     ALLOWED_IN_OUTPUT_DIR,
     SEARCH_BACKEND_ENGINE,
+    LDAP,
     check_dependencies,
     check_data_folder,
     write_config_file,
     VERSION,
     COMMIT_HASH,
+    BUILD_TIME,
     CODE_LOCATIONS,
     EXTERNAL_LOCATIONS,
     DATA_LOCATIONS,
@@ -218,31 +220,39 @@ def version(quiet: bool=False,
 
     if not quiet:
         # 0.7.1
-        # ArchiveBox v0.7.1 Cpython Linux Linux-4.19.121-linuxkit-x86_64-with-glibc2.28 x86_64 (in Docker) (in TTY)
-        # DEBUG=False IN_DOCKER=True IN_QEMU=False IS_TTY=True TZ=UTC FS_ATOMIC=True FS_REMOTE=False FS_PERMS=644 FS_USER=501:20 SEARCH_BACKEND=ripgrep
+        # ArchiveBox v0.7.1+editable COMMIT_HASH=951bba5 BUILD_TIME=2023-12-17 16:46:05 1702860365
+        # IN_DOCKER=False IN_QEMU=False ARCH=arm64 OS=Darwin PLATFORM=macOS-14.2-arm64-arm-64bit PYTHON=Cpython
+        # FS_ATOMIC=True FS_REMOTE=False FS_USER=501:20 FS_PERMS=644
+        # DEBUG=False IS_TTY=True TZ=UTC SEARCH_BACKEND=ripgrep LDAP=False
 
         p = platform.uname()
         print(
             'ArchiveBox v{}'.format(VERSION),
-            *((COMMIT_HASH[:7],) if COMMIT_HASH else ()),
-            sys.implementation.name.title(),
-            p.system,
-            platform.platform(),
-            p.machine,
+            *((f'COMMIT_HASH={COMMIT_HASH[:7]}',) if COMMIT_HASH else ()),
+            f'BUILD_TIME={BUILD_TIME}',
+        )
+        print(
+            f'IN_DOCKER={IN_DOCKER}',
+            f'IN_QEMU={IN_QEMU}',
+            f'ARCH={p.machine}',
+            f'OS={p.system}',
+            f'PLATFORM={platform.platform()}',
+            f'PYTHON={sys.implementation.name.title()}',
         )
         OUTPUT_IS_REMOTE_FS = DATA_LOCATIONS['OUTPUT_DIR']['is_mount'] or DATA_LOCATIONS['ARCHIVE_DIR']['is_mount']
         print(
-            f'DEBUG={DEBUG}',
-            f'IN_DOCKER={IN_DOCKER}',
-            f'IN_QEMU={IN_QEMU}',
-            f'IS_TTY={IS_TTY}',
-            f'TZ={TIMEZONE}',
-            #f'DB=django.db.backends.sqlite3 (({CONFIG["SQLITE_JOURNAL_MODE"]})', # add this if we have more useful info to show eventually
             f'FS_ATOMIC={ENFORCE_ATOMIC_WRITES}',
             f'FS_REMOTE={OUTPUT_IS_REMOTE_FS}',
             f'FS_USER={PUID}:{PGID}',
             f'FS_PERMS={OUTPUT_PERMISSIONS}',
+        )
+        print(
+            f'DEBUG={DEBUG}',
+            f'IS_TTY={IS_TTY}',
+            f'TZ={TIMEZONE}',
             f'SEARCH_BACKEND={SEARCH_BACKEND_ENGINE}',
+            f'LDAP={LDAP}',
+            #f'DB=django.db.backends.sqlite3 (({CONFIG["SQLITE_JOURNAL_MODE"]})', # add this if we have more useful info to show eventually
         )
         print()
 
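Two small idioms in the rewritten print calls are worth noting: passing several f-strings to one print() joins them with spaces into a single KEY=VALUE line, and the *((...,) if COMMIT_HASH else ()) splat includes the COMMIT_HASH= field only when a commit hash was actually detected (get_commit_hash() returns None when no .git refs are found, e.g. in a non-git install). A small self-contained demonstration with made-up values:

# Demonstration of the conditional-splat idiom used above (made-up values).
COMMIT_HASH = '951bba52a0'   # would be None for a non-git install
BUILD_TIME = '2023-12-17 16:46:05 1702860365'
VERSION = '0.7.1'

print(
    'ArchiveBox v{}'.format(VERSION),
    # The inner tuple is empty when COMMIT_HASH is None, so the field
    # simply disappears from the output instead of printing 'COMMIT_HASH=None'.
    *((f'COMMIT_HASH={COMMIT_HASH[:7]}',) if COMMIT_HASH else ()),
    f'BUILD_TIME={BUILD_TIME}',
)
# With a hash:    ArchiveBox v0.7.1 COMMIT_HASH=951bba5 BUILD_TIME=2023-12-17 16:46:05 1702860365
# Without a hash: ArchiveBox v0.7.1 BUILD_TIME=2023-12-17 16:46:05 1702860365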
@@ -271,7 +281,7 @@ def version(quiet: bool=False,
                 print(printable_folder_status(name, path))
         else:
             print()
-            print('{white}[i] Data locations:{reset}'.format(**ANSI))
+            print('{white}[i] Data locations:{reset} (not in a data directory)'.format(**ANSI))
 
     print()
     check_dependencies()