Mirror of https://github.com/ArchiveBox/ArchiveBox.git, synced 2025-05-13 14:44:29 -04:00
working consistent list and remove with filtering

commit d8d8f7c2cc (parent 4ca9a0beac)
6 changed files with 242 additions and 31 deletions
@@ -1,10 +1,10 @@
 import re
 import json
+import shutil

 from typing import List, Optional, Iterable

 from .schema import Link
-from .util import enforce_types, ExtendedEncoder
+from .util import enforce_types, TimedProgress, to_csv
 from .index import (
     links_after_timestamp,
     load_links_index,
@@ -12,6 +12,7 @@ from .index import (
 )
 from .archive_methods import archive_link
 from .config import (
     ANSI,
     ONLY_NEW,
     OUTPUT_DIR,
     check_dependencies,
@@ -61,23 +62,91 @@ def update_archive_data(import_path: Optional[str]=None, resume: Optional[float]
     return all_links


+LINK_FILTERS = {
+    'exact': lambda link, pattern: (link.url == pattern) or (link.base_url == pattern),
+    'substring': lambda link, pattern: pattern in link.url,
+    'regex': lambda link, pattern: bool(re.match(pattern, link.url)),
+    'domain': lambda link, pattern: link.domain == pattern,
+}
+
+def link_matches_filter(link: Link, filter_patterns: List[str], filter_type: str='exact') -> bool:
+    for pattern in filter_patterns:
+        if LINK_FILTERS[filter_type](link, pattern):
+            return True
+
+    return False
+
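For reference, this is how the four filter types behave given the definitions above; a minimal standalone sketch, where the SimpleNamespace stands in for a real .schema.Link and only the url, base_url, and domain fields that the lambdas actually read are filled in:

    from types import SimpleNamespace

    # Stand-in for .schema.Link with just the fields LINK_FILTERS reads.
    link = SimpleNamespace(
        url='https://example.com/posts/1',
        base_url='example.com/posts/1',
        domain='example.com',
    )

    LINK_FILTERS['exact'](link, 'example.com/posts/1')   # True: equals base_url
    LINK_FILTERS['substring'](link, 'posts')             # True: 'posts' in link.url
    LINK_FILTERS['regex'](link, r'https://.*')           # True: re.match anchors at the start of url
    LINK_FILTERS['domain'](link, 'example.com')          # True: exact domain equality
    LINK_FILTERS['domain'](link, 'other.org')            # False

    link_matches_filter(link, ['other.org', 'posts'], 'substring')  # True: any one pattern suffices

link_matches_filter() ORs the per-pattern results, so a link matches as soon as any single pattern does.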
|
 @enforce_types
-def list_archive_data(filter_regex: Optional[str]=None, after: Optional[float]=None, before: Optional[float]=None) -> Iterable[Link]:
+def list_archive_data(filter_patterns: Optional[List[str]]=None, filter_type: str='exact',
+                      after: Optional[float]=None, before: Optional[float]=None) -> Iterable[Link]:

     all_links, _ = load_links_index(out_dir=OUTPUT_DIR)

-    pattern = re.compile(filter_regex, re.IGNORECASE) if filter_regex else None
-
     for link in all_links:
-        if pattern and not pattern.match(link.url):
-            continue
         if after is not None and float(link.timestamp) < after:
             continue
         if before is not None and float(link.timestamp) > before:
             continue

-        yield link
+        if filter_patterns:
+            if link_matches_filter(link, filter_patterns, filter_type):
+                yield link
+        else:
+            yield link

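Note that list_archive_data is a generator and compares after/before against float(link.timestamp). Assuming the index timestamps are unix-epoch seconds (which the float() comparison suggests), a caller sketch with illustrative dates looks like:

    from datetime import datetime, timezone

    # Epoch bounds for an illustrative date window.
    after_ts = datetime(2019, 1, 1, tzinfo=timezone.utc).timestamp()
    before_ts = datetime(2019, 3, 1, tzinfo=timezone.utc).timestamp()

    # All archived links on one domain captured inside the window.
    matches = list(list_archive_data(
        filter_patterns=['example.com'],
        filter_type='domain',
        after=after_ts,
        before=before_ts,
    ))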
-def csv_format(link: Link, csv_cols: str) -> str:
-    return ','.join(json.dumps(getattr(link, col), cls=ExtendedEncoder) for col in csv_cols.split(','))
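The removed csv_format() helper JSON-encoded each requested Link attribute and joined them into one CSV row; that job moves to to_csv() in .util, whose implementation isn't shown in this diff. A hypothetical equivalent, just to make the swap concrete:

    import json

    # Hypothetical stand-in for .util.to_csv: one JSON-encoded column per
    # attribute, one row per link, plus a header row. The original helper
    # used json.dumps(..., cls=ExtendedEncoder) to handle non-JSON-native
    # field types; note it now takes csv_cols as a list, as the
    # remove_archive_links call below shows.
    def to_csv_sketch(links, csv_cols):
        header = ','.join(csv_cols)
        rows = (
            ','.join(json.dumps(getattr(link, col)) for col in csv_cols)
            for link in links
        )
        return '\n'.join([header, *rows])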
+@enforce_types
+def remove_archive_links(filter_patterns: List[str], filter_type: str='exact',
+                         after: Optional[float]=None, before: Optional[float]=None,
+                         yes: bool=False, delete: bool=False):
+
+    check_dependencies()
+
+    print('[*] Finding links in the archive index matching these {} patterns:'.format(filter_type))
+    print('    {}'.format(' '.join(filter_patterns)))
+    timer = TimedProgress(360, prefix='      ')
+    try:
+        links = list(list_archive_data(
+            filter_patterns=filter_patterns,
+            filter_type=filter_type,
+            after=after,
+            before=before,
+        ))
+    finally:
+        timer.end()
+    if not len(links):
+        print()
+        print('{red}[X] No matching links found.{reset}'.format(**ANSI))
+        raise SystemExit(1)
+
+    print()
+    print('-------------------------------------------------------------------')
+    print(to_csv(links, csv_cols=['link_dir', 'url', 'is_archived', 'num_outputs']))
+    print('-------------------------------------------------------------------')
+    print()
+    if not yes:
+        resp = input('{lightyellow}[?] Are you sure you want to permanently remove these {} archived links? N/y: {reset}'.format(len(links), **ANSI))
+
+        if not resp.lower() == 'y':
+            raise SystemExit(0)
+
+    all_links, _ = load_links_index(out_dir=OUTPUT_DIR)
+    to_keep = []
+
+    for link in all_links:
+        should_remove = (
+            (after is not None and float(link.timestamp) < after)
+            or (before is not None and float(link.timestamp) > before)
+            or link_matches_filter(link, filter_patterns, filter_type)
+        )
+        if not should_remove:
+            to_keep.append(link)
+        elif should_remove and delete:
+            shutil.rmtree(link.link_dir)
+
+    num_removed = len(all_links) - len(to_keep)
+    write_links_index(links=to_keep, out_dir=OUTPUT_DIR, finished=True)
+    print()
+    print('{red}[√] Removed {} out of {} links from the archive index.{reset}'.format(num_removed, len(all_links), **ANSI))
+    print('    Index now contains {} links.'.format(len(to_keep)))
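Taken together, remove_archive_links previews the matches via list_archive_data, prints them with to_csv, asks for confirmation unless yes=True, then rewrites the index without the matching links; snapshot folders are only rmtree'd when delete=True. A minimal usage sketch, with an illustrative domain:

    # Drop a domain's entries from the index but keep their folders on disk.
    remove_archive_links(['example.com'], filter_type='domain', yes=True)

    # Same selection, but also delete each matching link's data folder.
    remove_archive_links(['example.com'], filter_type='domain', yes=True, delete=True)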
|