Mirror of https://github.com/ArchiveBox/ArchiveBox.git (synced 2025-05-09 12:21:57 -04:00)
move almost all config into new archivebox.CONSTANTS
Some checks are pending
CodeQL / Analyze (python) (push) Waiting to run
Build Debian package / build (push) Waiting to run
Build Docker image / buildx (push) Waiting to run
Build Homebrew package / build (push) Waiting to run
Build GitHub Pages website / build (push) Waiting to run
Build GitHub Pages website / deploy (push) Blocked by required conditions
Run linters / lint (push) Waiting to run
Build Pip package / build (push) Waiting to run
Run tests / python_tests (ubuntu-22.04, 3.11) (push) Waiting to run
Run tests / docker_tests (push) Waiting to run
Parent: f5e8d99fdf
Commit: bb65b2dbec
32 changed files with 982 additions and 840 deletions
archivebox/extractors/__init__.py

@@ -14,7 +14,6 @@ from ..config import (
     SAVE_ALLOWLIST_PTN,
     SAVE_DENYLIST_PTN,
 )
-from ..core.settings import ERROR_LOG
 from ..index.schema import ArchiveResult, Link
 from ..index.sql import write_link_to_sql_index
 from ..index import (
@@ -109,6 +108,8 @@ def ignore_methods(to_ignore: List[str]) -> Iterable[str]:
 def archive_link(link: Link, overwrite: bool=False, methods: Optional[Iterable[str]]=None, out_dir: Optional[Path]=None, created_by_id: int | None=None) -> Link:
     """download the DOM, PDF, and a screenshot into a folder named after the link's timestamp"""
 
+    from django.conf import settings
+
     from ..search import write_search_index
 
     # TODO: Remove when the input is changed to be a snapshot. Suboptimal approach.
@@ -169,7 +170,7 @@ def archive_link(link: Link, overwrite: bool=False, methods: Optional[Iterable[s
                 stats['skipped'] += 1
         except Exception as e:
             # https://github.com/ArchiveBox/ArchiveBox/issues/984#issuecomment-1150541627
-            with open(ERROR_LOG, "a", encoding='utf-8') as f:
+            with open(settings.ERROR_LOG, "a", encoding='utf-8') as f:
                 command = ' '.join(sys.argv)
                 ts = datetime.now(timezone.utc).strftime('%Y-%m-%d__%H:%M:%S')
                 f.write(("\n" + 'Exception in archive_methods.save_{}(Link(url={})) command={}; ts={}'.format(
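Reviewer note on the ERROR_LOG change above: the module-level import from ..core.settings is dropped in favor of a function-local 'from django.conf import settings' plus a settings.ERROR_LOG lookup at call time. Because django.conf.settings is a lazy proxy, the attribute is only resolved once settings are configured, which sidesteps import-order problems while config is being moved into archivebox.CONSTANTS. A minimal standalone sketch of the pattern, assuming only that Django is installed (ERROR_LOG here is a hand-configured stand-in setting, not ArchiveBox's real settings module):

from django.conf import settings  # lazy proxy: importing this is always safe

def log_error(message: str) -> None:
    # settings.ERROR_LOG is resolved here, at call time, not at import time,
    # so this module can be imported before Django settings are configured.
    with open(settings.ERROR_LOG, 'a', encoding='utf-8') as f:
        f.write(message + '\n')

if __name__ == '__main__':
    settings.configure(ERROR_LOG='/tmp/errors.log')  # hypothetical standalone config
    log_error('something went wrong')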
archivebox/extractors/htmltotext.py

@@ -1,5 +1,7 @@
 __package__ = 'archivebox.extractors'
 
+import archivebox
+
 from html.parser import HTMLParser
 import io
 from pathlib import Path
@@ -8,7 +10,6 @@ from typing import Optional
 from ..config import (
     SAVE_HTMLTOTEXT,
     TIMEOUT,
-    VERSION,
 )
 from ..index.schema import Link, ArchiveResult, ArchiveError
 from ..logging_util import TimedProgress
@@ -153,7 +154,7 @@ def save_htmltotext(link: Link, out_dir: Optional[Path]=None, timeout: int=TIMEO
     return ArchiveResult(
         cmd=cmd,
         pwd=str(out_dir),
-        cmd_version=VERSION,
+        cmd_version=archivebox.__version__,
         output=output,
         status=status,
         index_texts=[extracted_text] if extracted_text else [],
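Reviewer note on the VERSION change above: rather than importing a VERSION constant from ..config when the module loads, the extractor now reads archivebox.__version__ when it runs. One common way a package exposes __version__ from its installed metadata is sketched below; this is an assumption about the wiring, not necessarily what archivebox/__init__.py actually does:

# sketch of a package __init__.py deriving __version__ from installed metadata
from importlib.metadata import PackageNotFoundError, version

try:
    __version__ = version('archivebox')
except PackageNotFoundError:
    # running from a source checkout that was never pip-installed
    __version__ = '0.0.0.dev0'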
archivebox/extractors/readability.py

@@ -8,17 +8,7 @@ import json
 
 from ..index.schema import Link, ArchiveResult, ArchiveError
 from ..system import run, atomic_write
-from ..util import (
-    enforce_types,
-    is_static_file,
-)
-from ..config import (
-    TIMEOUT,
-    CURL_BINARY,
-    SAVE_READABILITY,
-    DEPENDENCIES,
-    READABILITY_VERSION,
-)
+from ..util import enforce_types, is_static_file
 from ..logging_util import TimedProgress
 from .title import get_html
 
@@ -31,22 +21,29 @@ def get_embed_path(archiveresult=None):
 
 @enforce_types
 def should_save_readability(link: Link, out_dir: Optional[str]=None, overwrite: Optional[bool]=False) -> bool:
+    from plugins_extractor.readability.apps import READABILITY_CONFIG
+
     if is_static_file(link.url):
         return False
 
-    out_dir = out_dir or Path(link.link_dir)
-    if not overwrite and (out_dir / get_output_path()).exists():
+    output_subdir = (Path(out_dir or link.link_dir) / get_output_path())
+    if not overwrite and output_subdir.exists():
         return False
 
-    return SAVE_READABILITY
+    return READABILITY_CONFIG.SAVE_READABILITY
 
 
 @enforce_types
-def save_readability(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEOUT) -> ArchiveResult:
+def save_readability(link: Link, out_dir: Optional[str]=None, timeout: int=0) -> ArchiveResult:
     """download reader friendly version using @mozilla/readability"""
 
+    from plugins_extractor.readability.apps import READABILITY_CONFIG, READABILITY_BINARY
+
+    READABILITY_BIN = READABILITY_BINARY.load()
+    assert READABILITY_BIN.abspath and READABILITY_BIN.version
+
-    out_dir = Path(out_dir or link.link_dir)
-    output_folder = out_dir.absolute() / get_output_path()
+    timeout = timeout or READABILITY_CONFIG.READABILITY_TIMEOUT
+    output_subdir = Path(out_dir or link.link_dir).absolute() / get_output_path()
     output = get_output_path()
 
     # Readability Docs: https://github.com/mozilla/readability
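Reviewer note on the hunk above: SAVE_READABILITY, TIMEOUT, DEPENDENCIES, and READABILITY_VERSION no longer come from the global ..config module. The extractor imports a plugin-scoped READABILITY_CONFIG inside the function and resolves its binary lazily with READABILITY_BINARY.load(), so nothing is resolved at import time. The stdlib-only sketch below shows the general shape of that pattern; it is a simplified stand-in, not ArchiveBox's actual plugins_extractor framework:

import shutil
from dataclasses import dataclass
from pathlib import Path

@dataclass(frozen=True)
class ReadabilityConfig:
    SAVE_READABILITY: bool = True
    READABILITY_TIMEOUT: int = 60

@dataclass(frozen=True)
class LoadedBinary:
    abspath: Path
    version: str

@dataclass(frozen=True)
class Binary:
    name: str

    def load(self) -> LoadedBinary:
        # Resolve the binary only when an extractor actually runs, so merely
        # importing the extractor module never requires the tool to be installed.
        abspath = shutil.which(self.name)
        if abspath is None:
            raise FileNotFoundError(f'{self.name} not found on $PATH')
        # The real loader also detects the binary's version; a placeholder keeps this short.
        return LoadedBinary(abspath=Path(abspath), version='unknown')

READABILITY_CONFIG = ReadabilityConfig()
READABILITY_BINARY = Binary(name='readability-extractor')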
@@ -54,13 +51,14 @@ def save_readability(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEO
     status = 'succeeded'
     # fake command to show the user so they have something to try debugging if get_html fails
     cmd = [
-        CURL_BINARY,
-        link.url
+        str(READABILITY_BIN.abspath),
+        '{dom,singlefile}.html',
+        link.url,
     ]
     readability_content = None
     timer = TimedProgress(timeout, prefix='    ')
     try:
-        document = get_html(link, out_dir)
+        document = get_html(link, Path(out_dir or link.link_dir))
         temp_doc = NamedTemporaryFile(delete=False)
         temp_doc.write(document.encode("utf-8"))
         temp_doc.close()
@@ -69,26 +67,26 @@ def save_readability(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEO
             raise ArchiveError('Readability could not find HTML to parse for article text')
 
         cmd = [
-            DEPENDENCIES['READABILITY_BINARY']['path'],
+            str(READABILITY_BIN.abspath),
             temp_doc.name,
             link.url,
         ]
-        result = run(cmd, cwd=out_dir, timeout=timeout)
+        result = run(cmd, cwd=out_dir, timeout=timeout, text=True)
         try:
             result_json = json.loads(result.stdout)
             assert result_json and 'content' in result_json, 'Readability output is not valid JSON'
         except json.JSONDecodeError:
             raise ArchiveError('Readability was not able to archive the page (invalid JSON)', result.stdout + result.stderr)
 
-        output_folder.mkdir(exist_ok=True)
+        output_subdir.mkdir(exist_ok=True)
         readability_content = result_json.pop("textContent")
-        atomic_write(str(output_folder / "content.html"), result_json.pop("content"))
-        atomic_write(str(output_folder / "content.txt"), readability_content)
-        atomic_write(str(output_folder / "article.json"), result_json)
+        atomic_write(str(output_subdir / "content.html"), result_json.pop("content"))
+        atomic_write(str(output_subdir / "content.txt"), readability_content)
+        atomic_write(str(output_subdir / "article.json"), result_json)
 
         output_tail = [
             line.strip()
-            for line in (result.stdout + result.stderr).decode().rsplit('\n', 5)[-5:]
+            for line in (result.stdout + result.stderr).rsplit('\n', 5)[-5:]
             if line.strip()
         ]
         hints = (
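Reviewer note on the text=True change above: run() (ArchiveBox's subprocess.run wrapper in ..system) now captures stdout/stderr as str instead of bytes, assuming the wrapper forwards text= through to subprocess.run. That is why the .decode() disappears from the output_tail comprehension and why result.stdout and result.stderr can be concatenated as strings. A quick stdlib illustration:

import subprocess

# with text=True the captured streams are already-decoded str
r = subprocess.run(['echo', 'hello'], capture_output=True, text=True)
assert isinstance(r.stdout, str) and r.stdout.strip() == 'hello'

# without it they are bytes, and callers must .decode() themselves
r = subprocess.run(['echo', 'hello'], capture_output=True)
assert isinstance(r.stdout, bytes)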
@@ -111,7 +109,7 @@ def save_readability(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEO
     return ArchiveResult(
         cmd=cmd,
         pwd=str(out_dir),
-        cmd_version=READABILITY_VERSION,
+        cmd_version=str(READABILITY_BIN.version),
         output=output,
         status=status,
         index_texts=[readability_content] if readability_content else [],