Mirror of https://github.com/ArchiveBox/ArchiveBox.git (synced 2025-05-17 00:24:26 -04:00)
test: Update more init_tests that were failing because of wording or Link->Snapshot changes
This commit is contained in:
parent 49b5241a7c
commit ce5979de45

3 changed files with 7 additions and 6 deletions
@@ -42,16 +42,17 @@ MAIN_INDEX_HEADER = {

 @enforce_types
 def generate_json_index_from_snapshots(snapshots: List[Model], with_headers: bool):
+    snapshots_json = [snapshot.as_json() for snapshot in snapshots]
     if with_headers:
         output = {
             **MAIN_INDEX_HEADER,
             'num_links': len(snapshots),
             'updated': datetime.now(),
             'last_run_cmd': sys.argv,
-            'links': snapshots,
+            'links': snapshots_json,
         }
     else:
-        output = snapshots
+        output = snapshots_json
     return to_json(output, indent=4, sort_keys=True)

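The key fix in this hunk: generate_json_index_from_snapshots now converts each Snapshot model instance to a plain dict with as_json() before building the output, presumably because raw Django model instances are not JSON-serializable by default. A minimal sketch of the failure mode and the fix, using a hypothetical Snapshot stand-in rather than ArchiveBox's actual model:

    import json

    class Snapshot:
        """Hypothetical stand-in for the real Django Snapshot model."""
        def __init__(self, url, timestamp):
            self.url = url
            self.timestamp = timestamp

        def as_json(self):
            # Return a plain dict that any JSON encoder can handle
            return {'url': self.url, 'timestamp': self.timestamp}

    snapshots = [Snapshot('https://example.com', '1611175716')]

    # json.dumps(snapshots) would raise:
    #   TypeError: Object of type Snapshot is not JSON serializable
    snapshots_json = [snapshot.as_json() for snapshot in snapshots]
    print(json.dumps(snapshots_json, indent=4, sort_keys=True))
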
@@ -353,7 +353,7 @@ def init(force: bool=False, out_dir: Path=OUTPUT_DIR) -> None:
     orphaned_json_snapshots = {
         snapshot.url: snapshot
         for snapshot in parse_json_main_index(out_dir)
-        if not all_snapshots.filter(url=link.url).exists()
+        if not all_snapshots.filter(url=snapshot.url).exists()
     }
     if orphaned_json_snapshots:
         pending_snapshots.update(orphaned_json_snapshots)

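This hunk fixes a straggler from the Link->Snapshot rename: the dict comprehension binds only snapshot, so the old link.url reference would raise NameError (or, worse, silently compare against a stale link variable left over in the enclosing scope). A stripped-down illustration with plain dicts in place of real model objects:

    snapshots = [{'url': 'https://example.com'}]
    indexed_urls = set()  # URLs already present in the SQL index

    orphaned = {
        snapshot['url']: snapshot
        for snapshot in snapshots
        if snapshot['url'] not in indexed_urls  # old code referenced `link` here -> NameError
    }
    print(orphaned)  # {'https://example.com': {'url': 'https://example.com'}}
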
@@ -107,7 +107,7 @@ def test_collision_timestamps_different_urls(tmp_path, process, disable_extractors_dict):
         json.dump(link_details, f)

     init_process = subprocess.run(['archivebox', 'init'], capture_output=True, env=disable_extractors_dict)
-    assert "Skipped adding 1 invalid link data directories" in init_process.stdout.decode("utf-8")
+    assert "Skipped adding 1 invalid snapshot data directories" in init_process.stdout.decode("utf-8")
     assert init_process.returncode == 0

 def test_orphaned_folders(tmp_path, process, disable_extractors_dict):

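The remaining three hunks don't change the structure of the tests; they only update the exact wording asserted against the stdout of archivebox init (link -> snapshot, "existing" -> "deprecated"). The shared pattern, sketched with a shortened hypothetical assertion string:

    import subprocess

    # Run the CLI under test and capture its output
    init_process = subprocess.run(['archivebox', 'init'], capture_output=True)
    stdout = init_process.stdout.decode('utf-8')

    # String-matching assertions like these break whenever the CLI rewords
    # its messages -- exactly what this commit is catching up with.
    assert 'snapshot' in stdout
    assert init_process.returncode == 0
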
@@ -124,7 +124,7 @@ def test_orphaned_folders(tmp_path, process, disable_extractors_dict):
     conn.close()

     init_process = subprocess.run(['archivebox', 'init'], capture_output=True, env=disable_extractors_dict)
-    assert "Added 1 orphaned links from existing JSON index" in init_process.stdout.decode("utf-8")
+    assert "Added 1 orphaned snapshots from deprecated JSON index" in init_process.stdout.decode("utf-8")
     assert init_process.returncode == 0

 def test_unrecognized_folders(tmp_path, process, disable_extractors_dict):

@@ -134,7 +134,7 @@ def test_unrecognized_folders(tmp_path, process, disable_extractors_dict):
     (tmp_path / "archive" / "some_random_folder").mkdir()

     init_process = subprocess.run(['archivebox', 'init'], capture_output=True, env=disable_extractors_dict)
-    assert "Skipped adding 1 invalid link data directories" in init_process.stdout.decode("utf-8")
+    assert "Skipped adding 1 invalid snapshot data directories" in init_process.stdout.decode("utf-8")
     assert init_process.returncode == 0

 def test_tags_migration(tmp_path, disable_extractors_dict):