Mirror of https://github.com/ArchiveBox/ArchiveBox.git, synced 2025-05-23 03:06:55 -04:00
Merge branch 'v0.5.0' of github.com:ArchiveBox/ArchiveBox into feat-snapshots-grid
Commit 7b66e1514d
32 changed files with 401 additions and 237 deletions
@@ -10,11 +10,22 @@ CHOICES = (
     ('1', 'depth = 1 (archive these URLs and all URLs one hop away)'),
 )
 
+from ..extractors import get_default_archive_methods
+
+ARCHIVE_METHODS = [
+    (name, name)
+    for name, _, _ in get_default_archive_methods()
+]
+
 
 class AddLinkForm(forms.Form):
     url = forms.RegexField(label="URLs (one per line)", regex=URL_REGEX, min_length='6', strip=True, widget=forms.Textarea, required=True)
     depth = forms.ChoiceField(label="Archive depth", choices=CHOICES, widget=forms.RadioSelect, initial='0')
+
+    archive_methods = forms.MultipleChoiceField(
+        required=False,
+        widget=forms.SelectMultiple,
+        choices=ARCHIVE_METHODS,
+    )
 class TagWidgetMixin:
     def format_value(self, value):
         if value is not None and not isinstance(value, str):
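The new archive_methods field builds its choices at import time from the extractor registry, so whatever get_default_archive_methods() returns shows up automatically in the Add form, and required=False keeps an empty selection valid. A minimal standalone sketch of the same pattern, with Django configured inline and placeholder extractor names rather than ArchiveBox's real list:

# Illustrative sketch only: placeholder extractor names, bare-bones Django settings.
import django
from django.conf import settings

if not settings.configured:
    settings.configure()        # minimal settings so forms can run outside a project
django.setup()

from django import forms

FAKE_EXTRACTORS = ['title', 'wget', 'screenshot']             # hypothetical names
ARCHIVE_METHODS = [(name, name) for name in FAKE_EXTRACTORS]  # (value, label) pairs

class DemoAddForm(forms.Form):
    archive_methods = forms.MultipleChoiceField(
        required=False,
        widget=forms.SelectMultiple,
        choices=ARCHIVE_METHODS,
    )

form = DemoAddForm(data={'archive_methods': ['wget', 'screenshot']})
print(form.is_valid())                        # True
print(form.cleaned_data['archive_methods'])   # ['wget', 'screenshot']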
@@ -9,6 +9,12 @@ import django.db.models.deletion
 from config import CONFIG
 from index.json import to_json
 
+try:
+    JSONField = models.JSONField
+except AttributeError:
+    import jsonfield
+    JSONField = jsonfield.JSONField
+
 
 def forwards_func(apps, schema_editor):
     from core.models import EXTRACTORS
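The try/except block is a compatibility shim: models.JSONField only exists on Django 3.1+, so on older versions the attribute lookup raises AttributeError and the code falls back to the third-party jsonfield package. The same fallback written as an explicit version check, a sketch assuming jsonfield is installed wherever the fallback is actually taken:

# Equivalent version-check form of the fallback above (illustrative).
import django
from django.db import models

if django.VERSION >= (3, 1):
    JSONField = models.JSONField        # built into Django since 3.1
else:
    from jsonfield import JSONField     # external package providing a compatible field

Either way, the migration and model code below can reference a single JSONField name without caring which implementation backs it.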
@@ -76,7 +82,7 @@ class Migration(migrations.Migration):
             name='ArchiveResult',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('cmd', models.JSONField()),
+                ('cmd', JSONField()),
                 ('pwd', models.CharField(max_length=256)),
                 ('cmd_version', models.CharField(max_length=32)),
                 ('status', models.CharField(choices=[('succeeded', 'succeeded'), ('failed', 'failed'), ('skipped', 'skipped')], max_length=16)),
@@ -18,6 +18,12 @@ STATUS_CHOICES = [
     ("skipped", "skipped")
 ]
 
+try:
+    JSONField = models.JSONField
+except AttributeError:
+    import jsonfield
+    JSONField = jsonfield.JSONField
+
 
 class Tag(models.Model):
     """
@@ -173,7 +179,7 @@ class ArchiveResultManager(models.Manager):
 
 class ArchiveResult(models.Model):
     snapshot = models.ForeignKey(Snapshot, on_delete=models.CASCADE)
-    cmd = models.JSONField()
+    cmd = JSONField()
     pwd = models.CharField(max_length=256)
     cmd_version = models.CharField(max_length=32)
     output = models.CharField(max_length=512)
@@ -12,6 +12,7 @@ from ..config import (
     ALLOWED_HOSTS,
     PACKAGE_DIR,
     ACTIVE_THEME,
+    TEMPLATES_DIR_NAME,
     SQL_INDEX_FILENAME,
     OUTPUT_DIR,
 )
@@ -68,14 +69,14 @@ AUTHENTICATION_BACKENDS = [
 STATIC_URL = '/static/'
 
 STATICFILES_DIRS = [
-    str(Path(PACKAGE_DIR) / 'themes' / ACTIVE_THEME / 'static'),
-    str(Path(PACKAGE_DIR) / 'themes' / 'default' / 'static'),
+    str(Path(PACKAGE_DIR) / TEMPLATES_DIR_NAME / ACTIVE_THEME / 'static'),
+    str(Path(PACKAGE_DIR) / TEMPLATES_DIR_NAME / 'default' / 'static'),
 ]
 
 TEMPLATE_DIRS = [
-    str(Path(PACKAGE_DIR) / 'themes' / ACTIVE_THEME),
-    str(Path(PACKAGE_DIR) / 'themes' / 'default'),
-    str(Path(PACKAGE_DIR) / 'themes'),
+    str(Path(PACKAGE_DIR) / TEMPLATES_DIR_NAME / ACTIVE_THEME),
+    str(Path(PACKAGE_DIR) / TEMPLATES_DIR_NAME / 'default'),
+    str(Path(PACKAGE_DIR) / TEMPLATES_DIR_NAME),
 ]
 
 TEMPLATES = [
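Replacing the hard-coded 'themes' path segment with TEMPLATES_DIR_NAME keeps the lookup order intact, and that order is what gives per-theme overrides: Django walks STATICFILES_DIRS and the template DIRS front to back and uses the first file it finds, so ACTIVE_THEME entries shadow the 'default' ones. A tiny first-match lookup in plain Python showing the same idea (the paths are made up):

# Illustrative first-match resolution over an ordered list of directories.
from pathlib import Path

def resolve(filename, search_dirs):
    for directory in search_dirs:              # earlier entries win
        candidate = Path(directory) / filename
        if candidate.exists():
            return candidate
    return None

search_dirs = ['templates/mytheme/static', 'templates/default/static']  # hypothetical
print(resolve('style.css', search_dirs))       # a theme path if present, else None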
@@ -150,12 +150,15 @@ class AddView(UserPassesTestMixin, FormView):
         url = form.cleaned_data["url"]
         print(f'[+] Adding URL: {url}')
         depth = 0 if form.cleaned_data["depth"] == "0" else 1
+        extractors = ','.join(form.cleaned_data["archive_methods"])
         input_kwargs = {
             "urls": url,
             "depth": depth,
             "update_all": False,
             "out_dir": OUTPUT_DIR,
         }
+        if extractors:
+            input_kwargs.update({"extractors": extractors})
         add_stdout = StringIO()
         with redirect_stdout(add_stdout):
             add(**input_kwargs)
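The view only adds the extractors key when the user actually selected something, so an empty choice keeps the previous behavior of running the default extractors, and the add() output is still captured via redirect_stdout. A standalone sketch of that conditional-kwargs-plus-capture pattern; fake_add is a stand-in with an illustrative signature, not ArchiveBox's real add():

# Illustrative only: fake_add mimics the call shape, not the real API.
from contextlib import redirect_stdout
from io import StringIO

def fake_add(urls, depth=0, update_all=False, out_dir='.', extractors=''):
    print(f'[+] adding {urls} depth={depth} extractors={extractors or "(all)"}')

input_kwargs = {
    'urls': 'https://example.com',
    'depth': 1,
    'update_all': False,
    'out_dir': '/tmp/archive',        # hypothetical output dir
}
selected = ['wget', 'screenshot']     # what the form's archive_methods might hold
extractors = ','.join(selected)
if extractors:                        # only pass the key when something was chosen
    input_kwargs.update({'extractors': extractors})

buf = StringIO()
with redirect_stdout(buf):            # capture output the same way the view does
    fake_add(**input_kwargs)
print(buf.getvalue(), end='')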