check data folder on startup

This commit is contained in:
Nick Sweeting 2019-04-16 23:19:21 -04:00
parent fafdef1e6d
commit 6e5a77e1ad
3 changed files with 6 additions and 2 deletions

View file

@ -428,6 +428,7 @@ def check_dependencies() -> None:
stderr('{red}[X] Missing some required dependencies.{reset}'.format(**ANSI)) stderr('{red}[X] Missing some required dependencies.{reset}'.format(**ANSI))
raise SystemExit(1) raise SystemExit(1)
def check_data_folder() -> None:
if HAS_INVALID_DB: if HAS_INVALID_DB:
stderr('{red}[X] No archive data found in:{reset} {}'.format(OUTPUT_DIR, **ANSI)) stderr('{red}[X] No archive data found in:{reset} {}'.format(OUTPUT_DIR, **ANSI))
stderr(' Are you running archivebox in the right folder?') stderr(' Are you running archivebox in the right folder?')

View file

@ -12,10 +12,10 @@ from .index import (
) )
from .archive_methods import archive_link from .archive_methods import archive_link
from .config import ( from .config import (
ANSI,
ONLY_NEW, ONLY_NEW,
OUTPUT_DIR, OUTPUT_DIR,
check_dependencies, check_dependencies,
check_data_folder,
) )
from .logs import ( from .logs import (
log_archiving_started, log_archiving_started,
@ -33,6 +33,7 @@ def update_archive_data(import_path: Optional[str]=None, resume: Optional[float]
"""The main ArchiveBox entrypoint. Everything starts here.""" """The main ArchiveBox entrypoint. Everything starts here."""
check_dependencies() check_dependencies()
check_data_folder()
# Step 1: Load list of links from the existing index # Step 1: Load list of links from the existing index
# merge in and dedupe new links from import_path # merge in and dedupe new links from import_path
@ -107,6 +108,8 @@ def remove_archive_links(filter_patterns: List[str], filter_type: str='exact',
yes: bool=False, delete: bool=False) -> List[Link]: yes: bool=False, delete: bool=False) -> List[Link]:
check_dependencies() check_dependencies()
check_data_folder()
log_list_started(filter_patterns, filter_type) log_list_started(filter_patterns, filter_type)
timer = TimedProgress(360, prefix=' ') timer = TimedProgress(360, prefix=' ')
try: try:

View file

@ -69,7 +69,7 @@ class ArchiveResult:
cols = cols or self.field_names() cols = cols or self.field_names()
return separator.join( return separator.join(
to_json(getattr(self, col), indent=False).ljust(ljust) to_json(getattr(self, col), indent=None).ljust(ljust)
for col in cols for col in cols
) )