Optionally import only new links
When periodically importing a huge list of links that contains many broken ones (for example, a big dump of links from a bookmarking service), those broken links will be rechecked on every run. To skip this, the environment variable ONLY_NEW can be used to import only new links and ignore the rest altogether. This partially fixes #95.
parent bf6e8f03e4
commit 69c007ce85
4 changed files with 37 additions and 4 deletions
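
Only the main script's hunks are reproduced below; the new_links() helper that the new import pulls in is defined in one of the other changed files and is not shown here. As a rough, hypothetical sketch of the deduplication it is described as performing (assuming each link is a dict carrying a 'url' key, as elsewhere in this codebase):

# Hypothetical sketch only -- the real new_links() lives in links.py and is
# not part of the excerpt below.
def new_links(all_links, existing_links):
    """Keep only the links whose URLs are not already in the existing index."""
    existing_urls = {link['url'] for link in existing_links}
    return [link for link in all_links if link['url'] not in existing_urls]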
@@ -10,7 +10,10 @@ from datetime import datetime
 from subprocess import run
 
 from parse import parse_links
-from links import validate_links
+from links import (
+    new_links,
+    validate_links
+)
 from archive_methods import archive_links, _RESULTS_TOTALS
 from index import (
     write_links_index,
@@ -19,6 +22,7 @@ from index import (
     parse_json_link_index,
 )
 from config import (
+    ONLY_NEW,
     OUTPUT_PERMISSIONS,
     OUTPUT_DIR,
     ANSI,
@@ -45,7 +49,7 @@ def print_help():
     print("    ./bin/bookmark-archiver ~/Downloads/bookmarks_export.html\n")
 
 
-def merge_links(archive_path=OUTPUT_DIR, import_path=None):
+def merge_links(archive_path=OUTPUT_DIR, import_path=None, only_new=False):
     """get new links from file and optionally append them to links in existing archive"""
     all_links = []
     if import_path:
@@ -76,6 +80,9 @@ def merge_links(archive_path=OUTPUT_DIR, import_path=None):
     #     **ANSI,
     # ))
 
+    if only_new:
+        return new_links(all_links, existing_links)
+
     return all_links
 
 def update_archive(archive_path, links, source=None, resume=None, append=True):
@@ -158,7 +165,7 @@ if __name__ == '__main__':
         source = download_url(source)
 
     # Step 1: Parse the links and dedupe them with existing archive
-    links = merge_links(archive_path=out_dir, import_path=source)
+    links = merge_links(archive_path=out_dir, import_path=source, only_new=False)
 
     # Step 2: Write new index
     write_links_index(out_dir=out_dir, links=links)
@@ -167,4 +174,8 @@ if __name__ == '__main__':
     # cleanup_archive(out_dir, links)
 
     # Step 4: Run the archive methods for each link
-    update_archive(out_dir, links, source=source, resume=resume, append=True)
+    if ONLY_NEW:
+        new_links = merge_links(archive_path=out_dir, import_path=source, only_new=True)
+        update_archive(out_dir, new_links, source=source, resume=resume, append=True)
+    else:
+        update_archive(out_dir, links, source=source, resume=resume, append=True)
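
The ONLY_NEW flag imported from config is likewise defined outside this excerpt. A minimal sketch of how such a setting is typically read from an environment variable, under the assumption that config.py follows the same os.getenv pattern as its other options:

# Hypothetical sketch -- the actual parsing in config.py may differ.
import os

ONLY_NEW = os.getenv('ONLY_NEW', 'False').lower() in ('true', '1', 'yes')

With the flag exported, a periodic re-import would then look something like ONLY_NEW=True ./bin/bookmark-archiver ~/Downloads/bookmarks_export.html (the invocation is taken from the help text in the diff; which truthy spellings are accepted depends on how config.py actually parses the variable).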