feat: Remove index.json and index.html generation from the regular process

Author: Cristian, 2020-10-08 11:02:26 -05:00 (committed by Cristian Vargas)
parent 494af5f2e1
commit ae1484b8bf
5 changed files with 25 additions and 30 deletions

@@ -1,5 +1,6 @@
 import subprocess
 import json
+import sqlite3
 from .fixtures import *
@@ -43,11 +44,16 @@ def test_depth_flag_1_crawls_the_page_AND_links(tmp_path, process, disable_extractors_dict):
         capture_output=True,
         env=disable_extractors_dict,
     )
-    with open(tmp_path / "index.json", "r") as f:
-        archive_file = f.read()
-    assert "http://127.0.0.1:8080/static/example.com.html" in archive_file
-    assert "http://127.0.0.1:8080/static/iana.org.html" in archive_file
+    conn = sqlite3.connect("index.sqlite3")
+    c = conn.cursor()
+    urls = c.execute("SELECT url from core_snapshot").fetchall()
+    conn.commit()
+    conn.close()
+    urls = list(map(lambda x: x[0], urls))
+    assert "http://127.0.0.1:8080/static/example.com.html" in urls
+    assert "http://127.0.0.1:8080/static/iana.org.html" in urls
 
 def test_overwrite_flag_is_accepted(process, disable_extractors_dict):
@@ -71,6 +77,8 @@ def test_add_updates_history_json_index(tmp_path, process, disable_extractors_dict):
         env=disable_extractors_dict,
     )
-    with open(tmp_path / "index.json", "r") as f:
+    archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
+    with open(archived_item_path / "index.json", "r") as f:
         output_json = json.load(f)
-    assert output_json["links"][0]["history"] != {}
+    assert output_json["history"] != {}
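
The hunks above switch the tests from reading the top-level index.json to querying index.sqlite3 directly and to reading each snapshot's own index.json under archive/. A minimal standalone sketch of the same lookups, assuming the current working directory is an ArchiveBox data folder (the helper names below are illustrative, not part of the commit):

# Sketch only, not part of the diff: query snapshot URLs from index.sqlite3
# and read the per-snapshot index.json files, as the updated tests do.
import json
import sqlite3
from pathlib import Path

def snapshot_urls(data_dir="."):
    # Snapshot rows live in the core_snapshot table of the SQLite index.
    conn = sqlite3.connect(str(Path(data_dir) / "index.sqlite3"))
    try:
        rows = conn.execute("SELECT url FROM core_snapshot").fetchall()
    finally:
        conn.close()
    return [row[0] for row in rows]

def per_snapshot_indexes(data_dir="."):
    # Every snapshot still keeps its own index.json inside its archive/ subfolder.
    for index_file in Path(data_dir).glob("archive/*/index.json"):
        yield json.loads(index_file.read_text())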

@@ -32,10 +32,11 @@ def test_add_link(tmp_path, process, disable_extractors_dict):
         output_json = json.load(f)
     assert "Example Domain" == output_json['history']['title'][0]['output']
-    with open(tmp_path / "index.html", "r") as f:
+    with open(archived_item_path / "index.html", "r") as f:
         output_html = f.read()
     assert "Example Domain" in output_html
 
 def test_add_link_support_stdin(tmp_path, process, disable_extractors_dict):
     disable_extractors_dict.update({"USE_WGET": "true"})
     os.chdir(tmp_path)
@@ -51,7 +52,7 @@ def test_add_link_support_stdin(tmp_path, process, disable_extractors_dict):
     assert "Example Domain" == output_json['history']['title'][0]['output']
 
 def test_correct_permissions_output_folder(tmp_path, process):
-    index_files = ['index.json', 'index.html', 'index.sqlite3', 'archive']
+    index_files = ['index.sqlite3', 'archive']
     for file in index_files:
         file_path = tmp_path / file
         assert oct(file_path.stat().st_mode)[-3:] == OUTPUT_PERMISSIONS
@@ -113,6 +114,9 @@ def test_orphaned_folders(tmp_path, process, disable_extractors_dict):
     os.chdir(tmp_path)
     subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True,
                    env=disable_extractors_dict)
+    list_process = subprocess.run(["archivebox", "list", "--json", "--with-headers"], capture_output=True)
+    with open(tmp_path / "index.json", "wb") as f:
+        f.write(list_process.stdout)
     conn = sqlite3.connect("index.sqlite3")
     c = conn.cursor()
     c.execute("DELETE from core_snapshot")
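
With the flat index.json no longer written during a normal run, the test above regenerates it on demand from the output of archivebox list --json --with-headers. A short sketch of the same idea outside the test suite, assuming it is run from inside an ArchiveBox data folder:

# Sketch: rebuild the old flat index.json on demand from the CLI output
# instead of expecting `archivebox add` to keep it up to date.
import subprocess

list_process = subprocess.run(
    ["archivebox", "list", "--json", "--with-headers"],
    capture_output=True,
)
with open("index.json", "wb") as f:
    f.write(list_process.stdout)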

@@ -6,10 +6,8 @@ def test_title_is_htmlencoded_in_index_html(tmp_path, process, disable_extractors_dict):
     Unencoded content should not be rendered as it facilitates xss injections
     and breaks the layout.
     """
-    add_process = subprocess.run(['archivebox', 'add', 'http://localhost:8080/static/title_with_html.com.html'],
+    subprocess.run(['archivebox', 'add', 'http://localhost:8080/static/title_with_html.com.html'],
                    capture_output=True, env=disable_extractors_dict)
+    list_process = subprocess.run(["archivebox", "list", "--html"], capture_output=True)
-    with open(tmp_path / "index.html", "r") as f:
-        output_html = f.read()
-    assert "<textarea>" not in output_html
+    assert "<textarea>" not in list_process.stdout.decode("utf-8")