Mirror of https://github.com/ArchiveBox/ArchiveBox.git (synced 2025-05-15 07:34:27 -04:00)

Commit 1fe95474c2: Merge branch 'dev' into link-removal2
52 changed files with 896 additions and 550 deletions
.github/CONTRIBUTING.md (12 lines changed)

@@ -9,15 +9,15 @@
 
 **Useful links:**
 
-- https://github.com/pirate/ArchiveBox/issues
+- https://github.com/ArchiveBox/ArchiveBox/issues
-- https://github.com/pirate/ArchiveBox/pulls
+- https://github.com/ArchiveBox/ArchiveBox/pulls
-- https://github.com/pirate/ArchiveBox/wiki/Roadmap
+- https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap
-- https://github.com/pirate/ArchiveBox/wiki/Install#manual-setup
+- https://github.com/ArchiveBox/ArchiveBox/wiki/Install#manual-setup
 
 ### Development Setup
 
 ```bash
-git clone https://github.com/pirate/ArchiveBox
+git clone https://github.com/ArchiveBox/ArchiveBox
 cd ArchiveBox
 # Ideally do this in a virtualenv
 pip install -e '.[dev]' # or use: pipenv install --dev
@@ -31,6 +31,8 @@ pip install -e '.[dev]' # or use: pipenv install --dev
 ./bin/build.sh
 ```
 
+For more common tasks see the `Development` section at the bottom of the README.
+
 ### Getting Help
 
 Open issues on Github or message me https://sweeting.me/#contact.
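Note: the development setup shown in the hunk above condenses to the following sketch; the explicit virtualenv commands are an assumption based on the "Ideally do this in a virtualenv" comment, not part of the repo's own instructions.

```bash
git clone https://github.com/ArchiveBox/ArchiveBox && cd ArchiveBox
python3 -m venv .venv && . .venv/bin/activate   # optional isolation, as the guide suggests
pip install -e '.[dev]'                         # editable install with dev extras
./bin/build.sh                                  # rebuild the package, as shown above
```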
.github/ISSUE_TEMPLATE/bug_report.md (15 lines changed)

@@ -10,7 +10,8 @@ assignees: ''
 <!--
 Please fill out the following information,
 feel free to delete sections if they're not applicable
-or if long issue templates annoy you :)
+or if long issue templates annoy you.
+(the only required section is the version information)
 -->
 
 #### Describe the bug
@@ -35,9 +36,11 @@ If applicable, post any relevant screenshots or copy/pasted terminal output from
 If you're reporting a parsing / importing error, **you must paste a copy of your redacted import file here**.
 -->
 
-#### Software versions
+#### ArchiveBox version
 
-- OS: ([e.g. macOS 10.14] the operating system you're running ArchiveBox on)
+<!-- Run the `archivebox version` command locally then copy paste the result here: -->
-- ArchiveBox version: (`git rev-parse HEAD | head -c7` [e.g. d798117] commit ID of the version you're running)
+```logs
-- Python version: (`python3 --version` [e.g. 3.7.0])
+replace this line with the *full*, unshortened output of running `archivebox version`
-- Chrome version: (`chromium-browser --version` [e.g. 73.1.2.3] if relevant to bug)
+```
+<!-- Tickets without full version info will closed until it is provided,
+we need the full output here to help you solve your issue -->
.github/ISSUE_TEMPLATE/feature_request.md (2 lines changed)

@@ -45,6 +45,6 @@ workarounds, or other software you've considered using to fix the problem.
 
 ---
 
-- [ ] I'm willing to contribute dev time / money to fix this issue
+- [ ] I'm willing to contribute [dev time](https://github.com/ArchiveBox/ArchiveBox#archivebox-development) / [money](https://github.com/sponsors/pirate) to fix this issue
 - [ ] I like ArchiveBox so far / would recommend it to a friend
 - [ ] I've had a lot of difficulty getting ArchiveBox set up
.github/workflows/codeql-analysis.yml (new file, 32 lines)

@@ -0,0 +1,32 @@
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ dev ]
+  pull_request:
+    branches: [ dev ]
+  schedule:
+    - cron: '43 1 * * 2'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'python' ]
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v1
+        with:
+          languages: ${{ matrix.language }}
+          queries: security-extended
+
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v1
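Note: the `schedule` trigger in the new workflow uses standard 5-field cron syntax; the annotation below is explanatory and not part of the file.

```bash
# cron: '43 1 * * 2'
#        │  │ │ │ └── day of week (2 = Tuesday)
#        │  │ │ └──── month (any)
#        │  │ └────── day of month (any)
#        │  └──────── hour (01, UTC)
#        └─────────── minute (43)
# i.e. CodeQL also runs weekly on Tuesdays at 01:43 UTC, in addition to pushes and PRs targeting dev.
```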
.github/workflows/debian.yml (5 lines changed)

@@ -19,7 +19,8 @@ jobs:
 
       - name: Install packaging dependencies
        run: |
-          sudo apt install -y \
+          sudo apt-get update -qq
+          sudo apt-get install -y \
            python3 python3-dev python3-pip python3-venv python3-all \
            dh-python debhelper devscripts dput software-properties-common \
            python3-distutils python3-setuptools python3-wheel python3-stdeb
@@ -36,7 +37,7 @@ jobs:
      - name: Install archivebox from deb
        run: |
          cd deb_dist/
-          sudo apt install ./archivebox*.deb
+          sudo apt-get install ./archivebox*.deb
 
      - name: Check ArchiveBox version
        run: |
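Note: refreshing the package index before installing is the usual pattern on fresh CI runners, where cached package lists may be stale or missing; a generic sketch of that pattern (not specific to this workflow):

```bash
sudo apt-get update -qq                       # refresh package indexes first
sudo apt-get install -y debhelper dh-python   # then install build dependencies non-interactively
```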
.github/workflows/docker.yml (39 lines changed)

@@ -1,9 +1,7 @@
 name: Build Docker image
 
 on:
-  push:
-    branches:
-      - master
+on: workflow_dispatch
   release:
     types:
      - created
@@ -16,12 +14,6 @@ jobs:
   buildx:
     runs-on: ubuntu-latest
     steps:
-      - name: Docker Login
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-
      - name: Checkout
        uses: actions/checkout@v2
        with:
@@ -51,6 +43,23 @@ jobs:
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-
 
+      - name: Docker Login
+        uses: docker/login-action@v1
+        if: github.event_name != 'pull_request'
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Collect Docker tags
+        id: docker_meta
+        uses: crazy-max/ghaction-docker-meta@v1
+        with:
+          images: archivebox/archivebox,nikisweeting/archivebox
+          tag-sha: true
+          tag-semver: |
+            {{version}}
+            {{major}}.{{minor}}
+
      - name: Build and push
        id: docker_build
@@ -59,15 +68,11 @@ jobs:
          context: ./
          file: ./Dockerfile
          builder: ${{ steps.buildx.outputs.name }}
-          push: true
+          push: ${{ github.event_name != 'pull_request' }}
-          tags: |
+          tags: ${{ steps.docker_meta.outputs.tags }}
-            ${{ secrets.DOCKER_USERNAME }}/archivebox:latest
-            ${{ secrets.DOCKER_USERNAME }}/archivebox:${{ github.sha }}
-            archivebox/archivebox:latest
-            archivebox/archivebox:${{ github.sha }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
-          platforms: linux/amd64,linux/arm64,linux/arm/v7
+          platforms: linux/amd64,linux/386,linux/arm64,linux/arm/v7
 
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}
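Note: the new `docker_meta` step computes the tags that the build step now pushes. Assuming the documented behaviour of `crazy-max/ghaction-docker-meta` (the exact tags below are illustrative, not taken from a real run), a release tagged `v0.5.3` would be pullable roughly as:

```bash
docker pull archivebox/archivebox:0.5.3        # from the {{version}} template
docker pull archivebox/archivebox:0.5          # from the {{major}}.{{minor}} template
docker pull archivebox/archivebox:sha-1fe9547  # from tag-sha: true (short commit SHA)
```

The same tags are also pushed under the `nikisweeting/archivebox` name listed in `images:`.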
.github/workflows/pip.yml (2 lines changed)

@@ -1,4 +1,4 @@
-name: Build pip package
+name: Build Pip package
 
 on:
   workflow_dispatch:
README.md (423 lines changed)

@@ -1,6 +1,6 @@
 <div align="center">
 <em><img src="https://i.imgur.com/5B48E3N.png" height="90px"></em>
-<h1>ArchiveBox<br/><sub>The open-source self-hosted web archive.</sub></h1>
+<h1>ArchiveBox<br/><sub>Open-source self-hosted web archiving.</sub></h1>
 
 ▶️ <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart">Quickstart</a> |
 <a href="https://archivebox.zervice.io/">Demo</a> |
@@ -17,96 +17,145 @@
 <!--<a href="http://webchat.freenode.net?channels=ArchiveBox&uio=d4"><img src="https://img.shields.io/badge/Community_chat-IRC-%2328A745.svg"/></a>-->
 
 <a href="https://github.com/ArchiveBox/ArchiveBox/blob/master/LICENSE"><img src="https://img.shields.io/badge/Open_source-MIT-green.svg?logo=git&logoColor=green"/></a>
-<a href="https://github.com/ArchiveBox/ArchiveBox/commits/dev"><img src="https://img.shields.io/github/last-commit/ArchiveBox/ArchiveBox.svg?logo=Sublime+Text&logoColor=green&label=Active"/></a>
 <a href="https://github.com/ArchiveBox/ArchiveBox"><img src="https://img.shields.io/github/stars/ArchiveBox/ArchiveBox.svg?logo=github&label=Stars&logoColor=blue"/></a>
 <a href="https://test.pypi.org/project/archivebox/"><img src="https://img.shields.io/badge/Python-%3E%3D3.7-yellow.svg?logo=python&logoColor=yellow"/></a>
 <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Install#dependencies"><img src="https://img.shields.io/badge/Chromium-%3E%3D59-orange.svg?logo=Google+Chrome&logoColor=orange"/></a>
-<a href="https://hub.docker.com/r/archivebox/archivebox"><img src="https://img.shields.io/badge/Docker-all%20platforms-lightblue.svg?logo=docker&logoColor=lightblue"/></a>
+<a href="https://hub.docker.com/r/archivebox/archivebox"><img src="https://img.shields.io/badge/Docker-all%20platforms-lightblue.svg?logo=docker&logoColor=lightblue"/></a><br/>
+<a href="https://github.com/ArchiveBox/ArchiveBox/commits/dev"><img src="https://img.shields.io/github/last-commit/ArchiveBox/ArchiveBox.svg?logo=Sublime+Text&logoColor=green&label=active"/></a>
+<a href="https://lgtm.com/projects/g/ArchiveBox/ArchiveBox/context:python"><img alt="Language grade: Python" src="https://img.shields.io/lgtm/grade/python/g/ArchiveBox/ArchiveBox.svg?logo=lgtm&logoWidth=18"/></a>
+<a href="https://lgtm.com/projects/g/ArchiveBox/ArchiveBox/context:javascript"><img alt="Language grade: JavaScript" src="https://img.shields.io/lgtm/grade/javascript/g/ArchiveBox/ArchiveBox.svg?logo=lgtm&logoWidth=18"/></a>
+<a href="https://lgtm.com/projects/g/ArchiveBox/ArchiveBox/alerts/"><img alt="Total alerts" src="https://img.shields.io/lgtm/alerts/g/ArchiveBox/ArchiveBox.svg?logo=lgtm&logoWidth=18"/></a>
 
 
 <hr/>
 </div>
 
-ArchiveBox is a powerful self-hosted internet archiving solution written in Python 3. You feed it URLs of pages you want to archive, and it saves them to disk in a variety of formats depending on the configuration and the content it detects.
+ArchiveBox is a powerful self-hosted internet archiving solution written in Python. You feed it URLs of pages you want to archive, and it saves them to disk in a variety of formats depending on setup and content within.
 
-Your archive can be managed through the command line with commands like `archivebox add`, through the built-in Web UI `archivebox server`, or via the Python library API (beta). It can ingest bookmarks from a browser or service like Pocket/Pinboard, your entire browsing history, RSS feeds, or URLs one at a time. You can also schedule regular/realtime imports with `archivebox schedule`.
+**🔢 Run ArchiveBox via [Docker Compose (recommended)](#Quickstart), Docker, Apt, Brew, or Pip ([see below](#Quickstart)).**
 
+```bash
+apt/brew/pip3 install archivebox
+
+archivebox init # run this in an empty folder
+archivebox add 'https://example.com' # start adding URLs to archive
+curl https://example.com/rss.xml | archivebox add # or add via stdin
+archivebox schedule --every=day https://example.com/rss.xml
+```
+
+For each URL added, ArchiveBox saves several types of HTML snapshot (wget, Chrome headless, singlefile), a PDF, a screenshot, a WARC archive, any git repositories, images, audio, video, subtitles, article text, [and more...](#output-formats).
+
+```bash
+archivebox server --createsuperuser 0.0.0.0:8000 # use the interactive web UI
+archivebox list 'https://example.com' # use the CLI commands (--help for more)
+ls ./archive/*/index.json # or browse directly via the filesystem
+```
+
+You can then manage your snapshots via the [filesystem](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#disk-layout), [CLI](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage), [Web UI](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#UI-Usage), [SQLite DB](https://github.com/ArchiveBox/ArchiveBox/blob/dev/archivebox/core/models.py) (`./index.sqlite3`), [Python API](https://docs.archivebox.io/en/latest/modules.html) (alpha), [REST API](https://github.com/ArchiveBox/ArchiveBox/issues/496) (alpha), or [desktop app](https://github.com/ArchiveBox/electron-archivebox) (alpha).
+
+At the end of the day, the goal is to sleep soundly knowing that the part of the internet you care about will be automatically preserved in multiple, durable long-term formats that will be accessible for decades (or longer).
+
+<div align="center">
+<br/><br/>
+<img src="https://i.imgur.com/PAzXZE8.png" height="70px" alt="bookshelf graphic"> <img src="https://i.imgur.com/asPNk8n.png" height="75px" alt="logo" align="top"/> <img src="https://i.imgur.com/PAzXZE8.png" height="70px" alt="bookshelf graphic">
+<br/><br/>
+</div>
+
+#### ⚡️ CLI Usage
+
+```bash
+# archivebox [subcommand] [--args]
+archivebox --version
+archivebox help
+```
+
+- `archivebox init/version/status/config/manage` to administer your collection
+- `archivebox add/remove/update/list` to manage Snapshots in the archive
+- `archivebox schedule` to pull in fresh URLs in regularly from [boorkmarks/history/Pocket/Pinboard/RSS/etc.](#input-formats)
+- `archivebox oneshot` archive single URLs without starting a whole collection
+- `archivebox shell` open a REPL to use the [Python API](https://docs.archivebox.io/en/latest/modules.html) (alpha)
+
+<div align="center">
+<br/>
+<sup><a href="https://archivebox.zervice.io/">Demo</a> | <a href="#screenshots">Screenshots</a> | <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Usage">Usage</a></sup>
+<br/>
+<sub>. . . . . . . . . . . . . . . . . . . . . . . . . . . .</sub>
+<br/><br/>
+<img src="https://i.imgur.com/njxgSbl.png" width="22%" alt="cli init screenshot" align="top">
+<img src="https://i.imgur.com/p6wK6KM.png" width="22%" alt="server snapshot admin screenshot" align="top">
+<img src="https://i.imgur.com/RefWsXB.jpg" width="28.6%" alt="server snapshot details page screenshot" align="top"/>
+<br/>
+<br/>
+<img src="https://i.imgur.com/T2UAGUD.png" width="49%" alt="grass"/><img src="https://i.imgur.com/T2UAGUD.png" width="49%" alt="grass"/>
+</div>
+
-The main index is a self-contained `index.sqlite3` file, and each snapshot is stored as a folder `data/archive/<timestamp>/`, with an easy-to-read `index.html` and `index.json` within. For each page, ArchiveBox auto-extracts many types of assets/media and saves them in standard formats, with out-of-the-box support for: several types of HTML snapshots (wget, Chrome headless, singlefile), PDF snapshotting, screenshotting, WARC archiving, git repositories, images, audio, video, subtitles, article text, and more. The snapshots are browseable and managable offline through the filesystem, the built-in webserver, or the Python library API.
 
 ### Quickstart
 
-It works on Linux/BSD (Intel and ARM CPUs with `docker`/`apt`/`pip3`), macOS (with `docker`/`brew`/`pip3`), and Windows (beta with `docker`/`pip3`).
+**🖥 Supported OSs:** Linux/BSD, macOS, Windows **🎮 CPU Architectures:** x86, amd64, arm7, arm8 (raspi >=3)
+**📦 Distributions:** `docker`/`apt`/`brew`/`pip3`/`npm` (in order of completeness)
 
-```bash
+*(click to expand your preferred **► `distribution`** below for full setup instructions)*
-pip3 install archivebox
-archivebox --version
-# install extras as-needed, or use one of full setup methods below to get everything out-of-the-box
-
-mkdir ~/archivebox && cd ~/archivebox # this can be anywhere
-archivebox init
-
-archivebox add 'https://example.com'
-archivebox add --depth=1 'https://example.com'
-archivebox schedule --every=day https://getpocket.com/users/USERNAME/feed/all
-archivebox oneshot --extract=title,favicon,media https://www.youtube.com/watch?v=dQw4w9WgXcQ
-archivebox help # to see more options
-```
-
-*(click to expand the sections below for full setup instructions)*
 
 <details>
 <summary><b>Get ArchiveBox with <code>docker-compose</code> on any platform (recommended, everything included out-of-the-box)</b></summary>
 
-First make sure you have Docker installed: https://docs.docker.com/get-docker/
+<i>First make sure you have Docker installed: https://docs.docker.com/get-docker/</i>
-<br/><br/>
-This is the recommended way to run ArchiveBox because it includes *all* the extractors like chrome, wget, youtube-dl, git, etc., as well as full-text search with sonic, and many other great features.
 
-```bash
+<pre lang="bash"><code>
 # create a new empty directory and initalize your collection (can be anywhere)
 mkdir ~/archivebox && cd ~/archivebox
-curl -O https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/master/docker-compose.yml
+curl -O 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/master/docker-compose.yml'
 docker-compose run archivebox init
 docker-compose run archivebox --version
 
 # start the webserver and open the UI (optional)
 docker-compose run archivebox manage createsuperuser
 docker-compose up -d
-open http://127.0.0.1:8000
+open 'http://127.0.0.1:8000'
 
 # you can also add links and manage your archive via the CLI:
 docker-compose run archivebox add 'https://example.com'
 docker-compose run archivebox status
 docker-compose run archivebox help # to see more options
-```
+</code></pre>
 
+This is the recommended way to run ArchiveBox because it includes <i>all</i> the extractors like:<br/>
+chrome, wget, youtube-dl, git, etc., full-text search w/ sonic, and many other great features.
+
 </details>
 
 <details>
 <summary><b>Get ArchiveBox with <code>docker</code> on any platform</b></summary>
 
-First make sure you have Docker installed: https://docs.docker.com/get-docker/<br/>
+<i>First make sure you have Docker installed: https://docs.docker.com/get-docker/</i>
-```bash
+<pre lang="bash"><code>
 # create a new empty directory and initalize your collection (can be anywhere)
 mkdir ~/archivebox && cd ~/archivebox
 docker run -v $PWD:/data -it archivebox/archivebox init
 docker run -v $PWD:/data -it archivebox/archivebox --version
 
 # start the webserver and open the UI (optional)
-docker run -v $PWD:/data -it archivebox/archivebox manage createsuperuser
-docker run -v $PWD:/data -p 8000:8000 archivebox/archivebox server 0.0.0.0:8000
+docker run -v $PWD:/data -it -p 8000:8000 archivebox/archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add links and manage your archive via the CLI:
 docker run -v $PWD:/data -it archivebox/archivebox add 'https://example.com'
 docker run -v $PWD:/data -it archivebox/archivebox status
 docker run -v $PWD:/data -it archivebox/archivebox help # to see more options
-```
+</code></pre>
 
 </details>
 
 <details>
 <summary><b>Get ArchiveBox with <code>apt</code> on Ubuntu >=20.04</b></summary>
 
-```bash
+<i>First make sure you're on Ubuntu >= 20.04, or scroll down for older/non-Ubuntu instructions.</i>
+
+<pre lang="bash"><code>
+# add the repo to your sources and install the archivebox package using apt
+sudo apt install software-properties-common
 sudo add-apt-repository -u ppa:archivebox/archivebox
 sudo apt install archivebox
 
@@ -117,8 +166,7 @@ archivebox init
 archivebox --version
 
 # start the webserver and open the web UI (optional)
-archivebox manage createsuperuser
-archivebox server 0.0.0.0:8000
+archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add URLs and manage the archive via the CLI and filesystem:
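Note: the rewritten README also points to `archivebox oneshot` for archiving a single URL without setting up a collection first; the invocation below is taken from the quickstart block removed earlier in this diff (check `archivebox oneshot --help` for current flags).

```bash
archivebox oneshot --extract=title,favicon,media 'https://www.youtube.com/watch?v=dQw4w9WgXcQ'
```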
@@ -127,13 +175,17 @@ archivebox status
 archivebox list --html --with-headers > index.html
 archivebox list --json --with-headers > index.json
 archivebox help # to see more options
-```
+</code></pre>
 
 For other Debian-based systems or older Ubuntu systems you can add these sources to `/etc/apt/sources.list`:
-```bash
+<pre lang="bash"><code>
 deb http://ppa.launchpad.net/archivebox/archivebox/ubuntu focal main
 deb-src http://ppa.launchpad.net/archivebox/archivebox/ubuntu focal main
-```
+</code></pre>
 
+Then run `apt update; apt install archivebox; archivebox --version`.
+
 (you may need to install some other dependencies manually however)
 
 </details>
@@ -141,7 +193,10 @@ deb-src http://ppa.launchpad.net/archivebox/archivebox/ubuntu focal main
 <details>
 <summary><b>Get ArchiveBox with <code>brew</code> on macOS >=10.13</b></summary>
 
-```bash
+<i>First make sure you have Homebrew installed: https://brew.sh/#install</i>
+
+<pre lang="bash"><code>
+# install the archivebox package using homebrew
 brew install archivebox/archivebox/archivebox
 
 # create a new empty directory and initalize your collection (can be anywhere)
@@ -151,8 +206,7 @@ archivebox init
 archivebox --version
 
 # start the webserver and open the web UI (optional)
-archivebox manage createsuperuser
-archivebox server 0.0.0.0:8000
+archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add URLs and manage the archive via the CLI and filesystem:
@@ -161,14 +215,17 @@ archivebox status
 archivebox list --html --with-headers > index.html
 archivebox list --json --with-headers > index.json
 archivebox help # to see more options
-```
+</code></pre>
 
 </details>
 
 <details>
 <summary><b>Get ArchiveBox with <code>pip</code> on any platform</b></summary>
 
-```bash
+<i>First make sure you have Python >= 3.7 installed: https://realpython.com/installing-python/</i>
+
+<pre lang="bash"><code>
+# install the archivebox package using pip3
 pip3 install archivebox
 
 # create a new empty directory and initalize your collection (can be anywhere)
@@ -179,8 +236,7 @@ archivebox --version
 # Install any missing extras like wget/git/chrome/etc. manually as needed
 
 # start the webserver and open the web UI (optional)
-archivebox manage createsuperuser
-archivebox server 0.0.0.0:8000
+archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add URLs and manage the archive via the CLI and filesystem:
@@ -189,56 +245,58 @@ archivebox status
 archivebox list --html --with-headers > index.html
 archivebox list --json --with-headers > index.json
 archivebox help # to see more options
-```
+</code></pre>
 
 </details>
 
----
 
-<div align="center">
-<img src="https://i.imgur.com/lUuicew.png" width="400px">
-<br/>
 
-<a href="https://archivebox.zervice.io">DEMO: archivebox.zervice.io/</a>
+No matter which install method you choose, they all roughly follow this 3-step process and all provide the same CLI, Web UI, and on-disk data format.
-For more information, see the <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart">full Quickstart guide</a>, <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Usage">Usage</a>, and <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration">Configuration</a> docs.
+
+<small>
+
+1. Install ArchiveBox: `apt/brew/pip3 install archivebox`
+2. Start a collection: `archivebox init`
+3. Start archiving: `archivebox add 'https://example.com'`
+
+</small>
+
+<br/>
+<div align="center">
+<img src="https://i.imgur.com/6AmOGJT.png" width="49%" alt="grass"/><img src="https://i.imgur.com/6AmOGJT.png" width="49%" alt="grass"/>
 </div>
+<br/>
----
 
 
-# Overview
 
-ArchiveBox is a command line tool, self-hostable web-archiving server, and Python library all-in-one. It can be installed on Docker, macOS, and Linux/BSD, and Windows. You can download and install it as a Debian/Ubuntu package, Homebrew package, Python3 package, or a Docker image. No matter which install method you choose, they all provide the same CLI, Web UI, and on-disk data format.
 
-To use ArchiveBox you start by creating a folder for your data to live in (it can be anywhere on your system), and running `archivebox init` inside of it. That will create a sqlite3 index and an `ArchiveBox.conf` file. After that, you can continue to add/export/manage/etc using the CLI `archivebox help`, or you can run the Web UI (recommended). If you only want to archive a single site, you can run `archivebox oneshot` to avoid having to create a whole collection.
 
-The CLI is considered "stable", the ArchiveBox Python API and REST APIs are "beta", and the [desktop app](https://github.com/ArchiveBox/desktop) is "alpha".
 
-At the end of the day, the goal is to sleep soundly knowing that the part of the internet you care about will be automatically preserved in multiple, durable long-term formats that will be accessible for decades (or longer). You can also self-host your archivebox server on a public domain to provide archive.org-style public access to your site snapshots.
 
 <div align="center">
-<img src="https://i.imgur.com/3tBL7PU.png" width="22%" alt="CLI Screenshot" align="top">
+<img src="https://i.imgur.com/lUuicew.png" width="22.4%" align="top">
-<img src="https://i.imgur.com/viklZNG.png" width="22%" alt="Desktop index screenshot" align="top">
+<img src="https://i.imgur.com/p6wK6KM.png" width="35.9%" align="top">
-<img src="https://i.imgur.com/RefWsXB.jpg" width="22%" alt="Desktop details page Screenshot"/>
+<img src="https://i.imgur.com/pzq4uXq.png" width="29.7%" align="top">
-<img src="https://i.imgur.com/M6HhzVx.png" width="22%" alt="Desktop details page Screenshot"/><br/>
+<br/><br/>
-<sup><a href="https://archive.sweeting.me/">Demo</a> | <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Usage">Usage</a></sup>
-<br/>
 <sub>. . . . . . . . . . . . . . . . . . . . . . . . . . . .</sub>
-</div><br/>
+<br/><br/>
+<a href="https://archivebox.zervice.io">DEMO: <code>https://archivebox.zervice.io</code></a><br/>
+<a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart">Quickstart</a> | <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Usage">Usage</a> | <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration">Configuration</a>
+<br/>
+</div>
 
 ## Key Features
 
 - [**Free & open source**](https://github.com/ArchiveBox/ArchiveBox/blob/master/LICENSE), doesn't require signing up for anything, stores all data locally
-- [**Few dependencies**](https://github.com/ArchiveBox/ArchiveBox/wiki/Install#dependencies) and [simple command line interface](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage)
+- [**Powerful, intuitive command line interface**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage) with [modular optional dependencies](#dependencies)
 - [**Comprehensive documentation**](https://github.com/ArchiveBox/ArchiveBox/wiki), [active development](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap), and [rich community](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community)
-- Easy to set up **[scheduled importing](https://github.com/ArchiveBox/ArchiveBox/wiki/Scheduled-Archiving) from multiple sources**
+- [**Extracts a wide variety of content out-of-the-box**](https://github.com/ArchiveBox/ArchiveBox/issues/51): [media (youtube-dl), articles (readability), code (git), etc.](#output-formats)
-- Uses common, **durable, [long-term formats](#saves-lots-of-useful-stuff-for-each-imported-link)** like HTML, JSON, PDF, PNG, and WARC
+- [**Supports scheduled/realtime importing**](https://github.com/ArchiveBox/ArchiveBox/wiki/Scheduled-Archiving) from [many types of sources](#input-formats)
-- ~~**Suitable for paywalled / [authenticated content](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#chrome_user_data_dir)** (can use your cookies)~~ (do not do this until v0.5 is released with some security fixes)
+- [**Uses standard, durable, long-term formats**](#saves-lots-of-useful-stuff-for-each-imported-link) like HTML, JSON, PDF, PNG, and WARC
-- **Doesn't require a constantly-running daemon**, proxy, or native app
+- [**Usable as a oneshot CLI**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage), [**self-hosted web UI**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#UI-Usage), [Python API](https://docs.archivebox.io/en/latest/modules.html) (BETA), [REST API](https://github.com/ArchiveBox/ArchiveBox/issues/496) (ALPHA), or [desktop app](https://github.com/ArchiveBox/electron-archivebox) (ALPHA)
-- Provides a CLI, Python API, self-hosted web UI, and REST API (WIP)
+- [**Saves all pages to archive.org as well**](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#submit_archive_dot_org) by default for redundancy (can be [disabled](https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview#stealth-mode) for local-only mode)
-- Architected to be able to run [**many varieties of scripts during archiving**](https://github.com/ArchiveBox/ArchiveBox/issues/51), e.g. to extract media, summarize articles, [scroll pages](https://github.com/ArchiveBox/ArchiveBox/issues/80), [close modals](https://github.com/ArchiveBox/ArchiveBox/issues/175), expand comment threads, etc.
+- Planned: support for archiving [content requiring a login/paywall/cookies](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#chrome_user_data_dir) (working, but ill-advised until some pending fixes are released)
-- Can also [**mirror content to 3rd-party archiving services**](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#submit_archive_dot_org) automatically for redundancy
+- Planned: support for running [JS scripts during archiving](https://github.com/ArchiveBox/ArchiveBox/issues/51), e.g. adblock, [autoscroll](https://github.com/ArchiveBox/ArchiveBox/issues/80), [modal-hiding](https://github.com/ArchiveBox/ArchiveBox/issues/175), [thread-expander](https://github.com/ArchiveBox/ArchiveBox/issues/345), etc.
 
+<br/>
+
+---
+
+<div align="center">
+<img src="https://i.imgur.com/OUmgdlH.png" width="96%" alt="lego">
+</div>
+
 ## Input formats
 
@@ -253,9 +311,10 @@ archivebox add --depth=1 'https://example.com/some/downloads.html'
 archivebox add --depth=1 'https://news.ycombinator.com#2020-12-12'
 ```
 
-- <img src="https://nicksweeting.com/images/bookmarks.png" height="22px"/> Browser history or bookmarks exports (Chrome, Firefox, Safari, IE, Opera, and more)
-- <img src="https://nicksweeting.com/images/rss.svg" height="22px"/> RSS, XML, JSON, CSV, SQL, HTML, Markdown, TXT, or any other text-based format
+- <img src="https://nicksweeting.com/images/rss.svg" height="22px"/> TXT, RSS, XML, JSON, CSV, SQL, HTML, Markdown, or [any other text-based format...](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#Import-a-list-of-URLs-from-a-text-file)
-- <img src="https://getpocket.com/favicon.ico" height="22px"/> Pocket, Pinboard, Instapaper, Shaarli, Delicious, Reddit Saved Posts, Wallabag, Unmark.it, OneTab, and more
+- <img src="https://nicksweeting.com/images/bookmarks.png" height="22px"/> [Browser history](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive) or [browser bookmarks](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive) (see instructions for: [Chrome](https://support.google.com/chrome/answer/96816?hl=en), [Firefox](https://support.mozilla.org/en-US/kb/export-firefox-bookmarks-to-backup-or-transfer), [Safari](http://i.imgur.com/AtcvUZA.png), [IE](https://support.microsoft.com/en-us/help/211089/how-to-import-and-export-the-internet-explorer-favorites-folder-to-a-32-bit-version-of-windows), [Opera](http://help.opera.com/Windows/12.10/en/importexport.html), [and more...](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive))
+- <img src="https://getpocket.com/favicon.ico" height="22px"/> [Pocket](https://getpocket.com/export), [Pinboard](https://pinboard.in/export/), [Instapaper](https://www.instapaper.com/user/export), [Shaarli](https://shaarli.readthedocs.io/en/master/Usage/#importexport), [Delicious](https://www.groovypost.com/howto/howto/export-delicious-bookmarks-xml/), [Reddit Saved](https://github.com/csu/export-saved-reddit), [Wallabag](https://doc.wallabag.org/en/user/import/wallabagv2.html), [Unmark.it](http://help.unmark.it/import-export), [OneTab](https://www.addictivetips.com/web/onetab-save-close-all-chrome-tabs-to-restore-export-or-import/), [and more...](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive)
 
 See the [Usage: CLI](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage) page for documentation and examples.
 
@@ -272,34 +331,51 @@ The on-disk layout is optimized to be easy to browse by hand and durable long-te
 ```
 
 - **Index:** `index.html` & `index.json` HTML and JSON index files containing metadata and details
-- **Title:** `title` title of the site
+- **Title**, **Favicon**, **Headers** Response headers, site favicon, and parsed site title
-- **Favicon:** `favicon.ico` favicon of the site
+- **Wget Clone:** `example.com/page-name.html` wget clone of the site with `warc/<timestamp>.gz`
-- **Headers:** `headers.json` Any HTTP headers the site returns are saved in a json file
+- Chrome Headless
 - **SingleFile:** `singlefile.html` HTML snapshot rendered with headless Chrome using SingleFile
-- **WGET Clone:** `example.com/page-name.html` wget clone of the site, with .html appended if not present
+- **PDF:** `output.pdf` Printed PDF of site using headless chrome
-- **WARC:** `warc/<timestamp>.gz` gzipped WARC of all the resources fetched while archiving
+- **Screenshot:** `screenshot.png` 1440x900 screenshot of site using headless chrome
-- **PDF:** `output.pdf` Printed PDF of site using headless chrome
+- **DOM Dump:** `output.html` DOM Dump of the HTML after rendering using headless chrome
-- **Screenshot:** `screenshot.png` 1440x900 screenshot of site using headless chrome
+- **Readability:** `article.html/json` Article text extraction using Readability
-- **DOM Dump:** `output.html` DOM Dump of the HTML after rendering using headless chrome
+- **Archive.org Permalink:** `archive.org.txt` A link to the saved site on archive.org
-- **Readability:** `article.html/json` Article text extraction using Readability
-- **URL to Archive.org:** `archive.org.txt` A link to the saved site on archive.org
 - **Audio & Video:** `media/` all audio/video files + playlists, including subtitles & metadata with youtube-dl
 - **Source Code:** `git/` clone of any repository found on github, bitbucket, or gitlab links
 - _More coming soon! See the [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap)..._
 
 It does everything out-of-the-box by default, but you can disable or tweak [individual archive methods](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration) via environment variables or config file.
 
+<div align="center">
+<img src="https://i.imgur.com/ucyimDX.png" width="96%" alt="lego graphic">
+</div>
+
+<br/>
+
+---
+
+<br/>
+
 ## Dependencies
 
 You don't need to install all the dependencies, ArchiveBox will automatically enable the relevant modules based on whatever you have available, but it's recommended to use the official [Docker image](https://github.com/ArchiveBox/ArchiveBox/wiki/Docker) with everything preinstalled.
 
-If you so choose, you can also install ArchiveBox and its dependencies directly on any Linux or macOS systems using the [automated setup script](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart) or the [system package manager](https://github.com/ArchiveBox/ArchiveBox/wiki/Install).
+If you so choose, you can also install ArchiveBox and its dependencies directly on any Linux or macOS systems using the [system package manager](https://github.com/ArchiveBox/ArchiveBox/wiki/Install) or by running the [automated setup script](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart).
 
 ArchiveBox is written in Python 3 so it requires `python3` and `pip3` available on your system. It also uses a set of optional, but highly recommended external dependencies for archiving sites: `wget` (for plain HTML, static files, and WARC saving), `chromium` (for screenshots, PDFs, JS execution, and more), `youtube-dl` (for audio and video), `git` (for cloning git repos), and `nodejs` (for readability and singlefile), and more.
 
+<br/>
+
+---
+
+<div align="center">
+<img src="https://docs.monadical.com/uploads/upload_b6900afc422ae699bfefa2dcda3306f3.png" width="100%" alt="security graphic"/>
+</div>
+
 ## Caveats
 
 If you're importing URLs containing secret slugs or pages with private content (e.g Google Docs, CodiMD notepads, etc), you may want to disable some of the extractor modules to avoid leaking private URLs to 3rd party APIs during the archiving process.
 
 ```bash
 # don't do this:
 archivebox add 'https://docs.google.com/document/d/12345somelongsecrethere'
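Note: one concrete way to apply the advice above is to turn off the extractors that send URLs to third-party services before adding private links. The flag name below follows the Configuration and Security-Overview wiki pages linked in this section (verify it against your installed version):

```bash
archivebox config --set SAVE_ARCHIVE_DOT_ORG=False   # don't submit added URLs to archive.org
archivebox add 'https://docs.google.com/document/d/12345somelongsecrethere'
```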
@@ -312,6 +388,7 @@ archivebox config --set CHROME_BINARY=chromium # optional: switch to chromium t
 ```
 
 Be aware that malicious archived JS can also read the contents of other pages in your archive due to snapshot CSRF and XSS protections being imperfect. See the [Security Overview](https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview#stealth-mode) page for more details.
+
 ```bash
 # visiting an archived page with malicious JS:
 https://127.0.0.1:8000/archive/1602401954/example.com/index.html
@@ -323,20 +400,67 @@ https://127.0.0.1:8000/archive/*
 ```
 
 Support for saving multiple snapshots of each site over time will be [added soon](https://github.com/ArchiveBox/ArchiveBox/issues/179) (along with the ability to view diffs of the changes between runs). For now ArchiveBox is designed to only archive each URL with each extractor type once. A workaround to take multiple snapshots of the same URL is to make them slightly different by adding a hash:
 
 ```bash
 archivebox add 'https://example.com#2020-10-24'
 ...
 archivebox add 'https://example.com#2020-10-25'
 ```
 
+<br/>
+
 ---
 
+<br/>
+
+## Screenshots
+
+<div align="center" width="80%">
+<img src="https://i.imgur.com/PVO88AZ.png" width="80%"/>
+<table>
+<tbody>
+<tr>
+<td>
+<img src="https://i.imgur.com/npareKG.png" alt="brew install archivebox"><br/>
+<img src="https://i.imgur.com/5vSBO2R.png" alt="archivebox version">
+</td>
+<td>
+<img src="https://i.imgur.com/JXXxFzB.png" alt="archivebox init"><br/>
+</td>
+<td>
+<img src="https://i.imgur.com/wNYtV3v.jpg" alt="archivebox add">
+</td>
+<td>
+<img src="https://i.imgur.com/uZcIOn9.png" alt="archivebox data dir">
+</td>
+</tr>
+<tr>
+<td>
+<img src="https://i.imgur.com/H08eaia.png" alt="archivebox server">
+</td>
+<td>
+<img src="https://i.imgur.com/zM4z1aU.png" alt="archivebox server add">
+</td>
+<td>
+<img src="https://i.imgur.com/p6wK6KM.png" alt="archivebox server list">
+</td>
+<td>
+<img src="https://i.imgur.com/pzq4uXq.png" alt="archivebox server detail">
+</td>
+</tr>
+</tbody>
+</table>
+</div>
+<br/>
+
+---
+
+<br/>
+
 <div align="center">
-<img src="https://i.imgur.com/PVO88AZ.png" width="80%"/>
+<img src="https://i.imgur.com/ZSUm9mr.png" width="100%" alt="paisley graphic">
 </div>
 
----
 
 # Background & Motivation
 
 Vast treasure troves of knowledge are lost every day on the internet to link rot. As a society, we have an imperative to preserve some important parts of that treasure, just like we preserve our books, paintings, and music in physical libraries long after the originals go out of print or fade into obscurity.
@@ -376,6 +500,11 @@ Unlike crawler software that starts from a seed URL and works outwards, or publi
 
 Because ArchiveBox is designed to ingest a firehose of browser history and bookmark feeds to a local disk, it can be much more disk-space intensive than a centralized service like the Internet Archive or Archive.today. However, as storage space gets cheaper and compression improves, you should be able to use it continuously over the years without having to delete anything. In my experience, ArchiveBox uses about 5gb per 1000 articles, but your milage may vary depending on which options you have enabled and what types of sites you're archiving. By default, it archives everything in as many formats as possible, meaning it takes more space than a using a single method, but more content is accurately replayable over extended periods of time. Storage requirements can be reduced by using a compressed/deduplicated filesystem like ZFS/BTRFS, or by setting `SAVE_MEDIA=False` to skip audio & video files.
 
+<div align="center">
+<br/>
+<img src="https://i.imgur.com/q0Oe36M.png" width="100%" alt="dependencies graphic">
+</div>
+
 ## Learn more
 
 Whether you want to learn which organizations are the big players in the web archiving space, want to find a specific open-source tool for your web archiving need, or just want to see where archivists hang out online, our Community Wiki page serves as an index of the broader web archiving community. Check it out to learn about some of the coolest web archiving projects and communities on the web!
@ -383,20 +512,26 @@ Whether you want to learn which organizations are the big players in the web arc
|
||||||
<img src="https://i.imgur.com/0ZOmOvN.png" width="14%" align="right"/>
|
<img src="https://i.imgur.com/0ZOmOvN.png" width="14%" align="right"/>
|
||||||
|
|
||||||
- [Community Wiki](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community)
|
- [Community Wiki](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community)
|
||||||
- [The Master Lists](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#The-Master-Lists)
|
- [The Master Lists](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#the-master-lists)
|
||||||
_Community-maintained indexes of archiving tools and institutions._
|
_Community-maintained indexes of archiving tools and institutions._
|
||||||
- [Web Archiving Software](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#Web-Archiving-Projects)
|
- [Web Archiving Software](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#web-archiving-projects)
|
||||||
_Open source tools and projects in the internet archiving space._
|
_Open source tools and projects in the internet archiving space._
|
||||||
- [Reading List](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#Reading-List)
|
- [Reading List](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#reading-list)
|
||||||
_Articles, posts, and blogs relevant to ArchiveBox and web archiving in general._
|
_Articles, posts, and blogs relevant to ArchiveBox and web archiving in general._
|
||||||
- [Communities](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#Communities)
|
- [Communities](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#communities)
|
||||||
_A collection of the most active internet archiving communities and initiatives._
|
_A collection of the most active internet archiving communities and initiatives._
|
||||||
- Check out the ArchiveBox [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap) and [Changelog](https://github.com/ArchiveBox/ArchiveBox/wiki/Changelog)
|
- Check out the ArchiveBox [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap) and [Changelog](https://github.com/ArchiveBox/ArchiveBox/wiki/Changelog)
|
||||||
- Learn why archiving the internet is important by reading the "[On the Importance of Web Archiving](https://parameters.ssrc.org/2018/09/on-the-importance-of-web-archiving/)" blog post.
|
- Learn why archiving the internet is important by reading the "[On the Importance of Web Archiving](https://parameters.ssrc.org/2018/09/on-the-importance-of-web-archiving/)" blog post.
|
||||||
- Or reach out to me for questions and comments via [@ArchiveBoxApp](https://twitter.com/ArchiveBoxApp) or [@theSquashSH](https://twitter.com/thesquashSH) on Twitter.
|
- Or reach out to me for questions and comments via [@ArchiveBoxApp](https://twitter.com/ArchiveBoxApp) or [@theSquashSH](https://twitter.com/thesquashSH) on Twitter.
|
||||||
|
|
||||||
|
<br/>
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
<img src="https://i.imgur.com/SMkGW0L.png" width="100%" alt="documentation graphic">
|
||||||
|
</div>
|
||||||
|
|
||||||
# Documentation
|
# Documentation
|
||||||
|
|
||||||
<img src="https://read-the-docs-guidelines.readthedocs-hosted.com/_images/logo-dark.png" width="13%" align="right"/>
|
<img src="https://read-the-docs-guidelines.readthedocs-hosted.com/_images/logo-dark.png" width="13%" align="right"/>
|
||||||
|
@ -422,8 +557,8 @@ You can also access the docs locally by looking in the [`ArchiveBox/docs/`](http
|
||||||
- [Chromium Install](https://github.com/ArchiveBox/ArchiveBox/wiki/Chromium-Install)
|
- [Chromium Install](https://github.com/ArchiveBox/ArchiveBox/wiki/Chromium-Install)
|
||||||
- [Security Overview](https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview)
|
- [Security Overview](https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview)
|
||||||
- [Troubleshooting](https://github.com/ArchiveBox/ArchiveBox/wiki/Troubleshooting)
|
- [Troubleshooting](https://github.com/ArchiveBox/ArchiveBox/wiki/Troubleshooting)
|
||||||
- [Python API](https://docs.archivebox.io/en/latest/modules.html)
|
- [Python API](https://docs.archivebox.io/en/latest/modules.html) (alpha)
|
||||||
- REST API (coming soon...)
|
- [REST API](https://github.com/ArchiveBox/ArchiveBox/issues/496) (alpha)
|
||||||
|
|
||||||
## More Info
|
## More Info
|
||||||
|
|
||||||
|
@ -434,37 +569,58 @@ You can also access the docs locally by looking in the [`ArchiveBox/docs/`](http
|
||||||
- [Background & Motivation](https://github.com/ArchiveBox/ArchiveBox#background--motivation)
|
- [Background & Motivation](https://github.com/ArchiveBox/ArchiveBox#background--motivation)
|
||||||
- [Web Archiving Community](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community)
|
- [Web Archiving Community](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community)
|
||||||
|
|
||||||
|
<br/>
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
<img src="https://i.imgur.com/EGWjbD4.png" width="100%" alt="development">
|
||||||
|
</div>
|
||||||
|
|
||||||
# ArchiveBox Development
|
# ArchiveBox Development
|
||||||
|
|
||||||
All contributions to ArchiveBox are welcome! Check our [issues](https://github.com/ArchiveBox/ArchiveBox/issues) and [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap) for things to work on, and please open an issue to discuss your proposed implementation before starting work. Otherwise we may have to close your PR if it doesn't align with our roadmap.
|
All contributions to ArchiveBox are welcome! Check our [issues](https://github.com/ArchiveBox/ArchiveBox/issues) and [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap) for things to work on, and please open an issue to discuss your proposed implementation before starting work. Otherwise we may have to close your PR if it doesn't align with our roadmap.
|
||||||
|
|
||||||
|
Low hanging fruit / easy first tickets:<br/>
|
||||||
|
<a href="https://lgtm.com/projects/g/ArchiveBox/ArchiveBox/alerts/"><img alt="Total alerts" src="https://img.shields.io/lgtm/alerts/g/ArchiveBox/ArchiveBox.svg?logo=lgtm&logoWidth=18"/></a>
|
||||||
|
|
||||||
### Set up the dev environment
|
### Set up the dev environment
|
||||||
|
|
||||||
First, install the system dependencies from the "Bare Metal" section above.
|
#### 1. Clone the main code repo (making sure to pull the submodules as well)
|
||||||
Then you can clone the ArchiveBox repo and install
|
|
||||||
```python3
|
```bash
|
||||||
git clone https://github.com/ArchiveBox/ArchiveBox && cd ArchiveBox
|
git clone --recurse-submodules https://github.com/ArchiveBox/ArchiveBox
|
||||||
git checkout master # or the branch you want to test
|
cd ArchiveBox
|
||||||
|
git checkout dev # or the branch you want to test
|
||||||
git submodule update --init --recursive
|
git submodule update --init --recursive
|
||||||
git pull --recurse-submodules
|
git pull --recurse-submodules
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2. Option A: Install the Python, JS, and system dependencies directly on your machine
|
||||||
|
|
||||||
|
```bash
|
||||||
# Install ArchiveBox + python dependencies
|
# Install ArchiveBox + python dependencies
|
||||||
python3 -m venv .venv && source .venv/bin/activate && pip install -e .[dev]
|
python3 -m venv .venv && source .venv/bin/activate && pip install -e '.[dev]'
|
||||||
# or with pipenv: pipenv install --dev && pipenv shell
|
# or: pipenv install --dev && pipenv shell
|
||||||
|
|
||||||
# Install node dependencies
|
# Install node dependencies
|
||||||
npm install
|
npm install
|
||||||
|
|
||||||
# Optional: install extractor dependencies manually or with helper script
|
# Check to see if anything is missing
|
||||||
|
archivebox --version
|
||||||
|
# install any missing dependencies manually, or use the helper script:
|
||||||
./bin/setup.sh
|
./bin/setup.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2. Option B: Build the docker container and use that for development instead
|
||||||
|
|
||||||
|
```bash
|
||||||
# Optional: develop via docker by mounting the code dir into the container
|
# Optional: develop via docker by mounting the code dir into the container
|
||||||
# if you edit e.g. ./archivebox/core/models.py on the docker host, runserver
|
# if you edit e.g. ./archivebox/core/models.py on the docker host, runserver
|
||||||
# inside the container will reload and pick up your changes
|
# inside the container will reload and pick up your changes
|
||||||
docker build . -t archivebox
|
docker build . -t archivebox
|
||||||
docker run -it -p 8000:8000 \
|
docker run -it --rm archivebox version
|
||||||
|
docker run -it --rm -p 8000:8000 \
|
||||||
-v $PWD/data:/data \
|
-v $PWD/data:/data \
|
||||||
-v $PWD/archivebox:/app/archivebox \
|
-v $PWD/archivebox:/app/archivebox \
|
||||||
archivebox server 0.0.0.0:8000 --debug --reload
|
archivebox server 0.0.0.0:8000 --debug --reload
|
||||||
|
@ -475,6 +631,21 @@ docker run -it -p 8000:8000 \
|
||||||
See the `./bin/` folder and read the source of the bash scripts within.
|
See the `./bin/` folder and read the source of the bash scripts within.
|
||||||
You can also run all these in Docker. For more examples see the Github Actions CI/CD tests that are run: `.github/workflows/*.yaml`.
|
You can also run all these in Docker. For more examples see the Github Actions CI/CD tests that are run: `.github/workflows/*.yaml`.
|
||||||
|
|
||||||
|
#### Run in DEBUG mode
|
||||||
|
|
||||||
|
```bash
|
||||||
|
archivebox config --set DEBUG=True
|
||||||
|
# or
|
||||||
|
archivebox server --debug ...
|
||||||
|
```
|
||||||
|
|
||||||
|
### Build and run a Github branch
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker build -t archivebox:dev https://github.com/ArchiveBox/ArchiveBox.git#dev
|
||||||
|
docker run -it -v $PWD:/data archivebox:dev ...
|
||||||
|
```
|
||||||
|
|
||||||
#### Run the linters
|
#### Run the linters
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
@ -491,17 +662,19 @@ You can also run all these in Docker. For more examples see the Github Actions C
|
||||||
|
|
||||||
#### Make migrations or enter a django shell
|
#### Make migrations or enter a django shell
|
||||||
|
|
||||||
|
Make sure to run this whenever you change things in `models.py`.
|
||||||
```bash
|
```bash
|
||||||
cd archivebox/
|
cd archivebox/
|
||||||
./manage.py makemigrations
|
./manage.py makemigrations
|
||||||
|
|
||||||
cd data/
|
cd path/to/test/data/
|
||||||
archivebox shell
|
archivebox shell
|
||||||
```
|
```
|
||||||
(uses `pytest -s`)
|
(uses `pytest -s`)
|
||||||
|
|
||||||
#### Build the docs, pip package, and docker image
|
#### Build the docs, pip package, and docker image
|
||||||
|
|
||||||
|
(Normally CI takes care of this, but these scripts can be run to do it manually)
|
||||||
```bash
|
```bash
|
||||||
./bin/build.sh
|
./bin/build.sh
|
||||||
|
|
||||||
|
@ -515,11 +688,17 @@ archivebox shell
|
||||||
|
|
||||||
#### Roll a release
|
#### Roll a release
|
||||||
|
|
||||||
|
(Normally CI takes care of this, but these scripts can be run to do it manually)
|
||||||
```bash
|
```bash
|
||||||
./bin/release.sh
|
./bin/release.sh
|
||||||
```
|
|
||||||
(bumps the version, builds, and pushes a release to PyPI, Docker Hub, and Github Packages)
|
|
||||||
|
|
||||||
|
# or individually:
|
||||||
|
./bin/release_docs.sh
|
||||||
|
./bin/release_pip.sh
|
||||||
|
./bin/release_deb.sh
|
||||||
|
./bin/release_brew.sh
|
||||||
|
./bin/release_docker.sh
|
||||||
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|
|
@ -42,6 +42,7 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--depth', # '-d',
|
'--depth', # '-d',
|
||||||
type=int,
|
type=int,
|
||||||
|
choices=[0, 1],
|
||||||
default=0,
|
default=0,
|
||||||
help='Depth to archive to [0] or 1, see "add" command help for more info.',
|
help='Depth to archive to [0] or 1, see "add" command help for more info.',
|
||||||
)
|
)
|
||||||
|
|
|
@ -43,6 +43,11 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help='Run archivebox init before starting the server',
|
help='Run archivebox init before starting the server',
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--createsuperuser',
|
||||||
|
action='store_true',
|
||||||
|
help='Run archivebox manage createsuperuser before starting the server',
|
||||||
|
)
|
||||||
command = parser.parse_args(args or ())
|
command = parser.parse_args(args or ())
|
||||||
reject_stdin(__command__, stdin)
|
reject_stdin(__command__, stdin)
|
||||||
|
|
||||||
|
@ -51,6 +56,7 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional
|
||||||
reload=command.reload,
|
reload=command.reload,
|
||||||
debug=command.debug,
|
debug=command.debug,
|
||||||
init=command.init,
|
init=command.init,
|
||||||
|
createsuperuser=command.createsuperuser,
|
||||||
out_dir=pwd or OUTPUT_DIR,
|
out_dir=pwd or OUTPUT_DIR,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -116,16 +116,15 @@ CONFIG_SCHEMA: Dict[str, ConfigDefaultDict] = {
|
||||||
'--write-annotations',
|
'--write-annotations',
|
||||||
'--write-thumbnail',
|
'--write-thumbnail',
|
||||||
'--no-call-home',
|
'--no-call-home',
|
||||||
'--user-agent',
|
|
||||||
'--all-subs',
|
'--all-subs',
|
||||||
'--extract-audio',
|
'--yes-playlist',
|
||||||
'--keep-video',
|
'--continue',
|
||||||
'--ignore-errors',
|
'--ignore-errors',
|
||||||
'--geo-bypass',
|
'--geo-bypass',
|
||||||
'--audio-format', 'mp3',
|
'--add-metadata',
|
||||||
'--audio-quality', '320K',
|
'--max-filesize=750m',
|
||||||
'--embed-thumbnail',
|
]},
|
||||||
'--add-metadata']},
|
|
||||||
|
|
||||||
'WGET_ARGS': {'type': list, 'default': ['--no-verbose',
|
'WGET_ARGS': {'type': list, 'default': ['--no-verbose',
|
||||||
'--adjust-extension',
|
'--adjust-extension',
|
||||||
|
@ -775,7 +774,7 @@ def get_dependency_info(config: ConfigDict) -> ConfigValue:
|
||||||
'version': config['PYTHON_VERSION'],
|
'version': config['PYTHON_VERSION'],
|
||||||
'hash': bin_hash(config['PYTHON_BINARY']),
|
'hash': bin_hash(config['PYTHON_BINARY']),
|
||||||
'enabled': True,
|
'enabled': True,
|
||||||
'is_valid': bool(config['DJANGO_VERSION']),
|
'is_valid': bool(config['PYTHON_VERSION']),
|
||||||
},
|
},
|
||||||
'DJANGO_BINARY': {
|
'DJANGO_BINARY': {
|
||||||
'path': bin_path(config['DJANGO_BINARY']),
|
'path': bin_path(config['DJANGO_BINARY']),
|
||||||
|
@ -787,7 +786,7 @@ def get_dependency_info(config: ConfigDict) -> ConfigValue:
|
||||||
'CURL_BINARY': {
|
'CURL_BINARY': {
|
||||||
'path': bin_path(config['CURL_BINARY']),
|
'path': bin_path(config['CURL_BINARY']),
|
||||||
'version': config['CURL_VERSION'],
|
'version': config['CURL_VERSION'],
|
||||||
'hash': bin_hash(config['PYTHON_BINARY']),
|
'hash': bin_hash(config['CURL_BINARY']),
|
||||||
'enabled': config['USE_CURL'],
|
'enabled': config['USE_CURL'],
|
||||||
'is_valid': bool(config['CURL_VERSION']),
|
'is_valid': bool(config['CURL_VERSION']),
|
||||||
},
|
},
|
||||||
|
@ -803,7 +802,7 @@ def get_dependency_info(config: ConfigDict) -> ConfigValue:
|
||||||
'version': config['NODE_VERSION'],
|
'version': config['NODE_VERSION'],
|
||||||
'hash': bin_hash(config['NODE_BINARY']),
|
'hash': bin_hash(config['NODE_BINARY']),
|
||||||
'enabled': config['USE_NODE'],
|
'enabled': config['USE_NODE'],
|
||||||
'is_valid': bool(config['SINGLEFILE_VERSION']),
|
'is_valid': bool(config['NODE_VERSION']),
|
||||||
},
|
},
|
||||||
'SINGLEFILE_BINARY': {
|
'SINGLEFILE_BINARY': {
|
||||||
'path': bin_path(config['SINGLEFILE_BINARY']),
|
'path': bin_path(config['SINGLEFILE_BINARY']),
|
||||||
|
|
|
@ -11,13 +11,14 @@ from django.shortcuts import render, redirect
|
||||||
from django.contrib.auth import get_user_model
|
from django.contrib.auth import get_user_model
|
||||||
from django import forms
|
from django import forms
|
||||||
|
|
||||||
|
from ..util import htmldecode, urldecode, ansi_to_html
|
||||||
|
|
||||||
from core.models import Snapshot, Tag
|
from core.models import Snapshot, Tag
|
||||||
from core.forms import AddLinkForm, TagField
|
from core.forms import AddLinkForm, TagField
|
||||||
|
|
||||||
from core.mixins import SearchResultsAdminMixin
|
from core.mixins import SearchResultsAdminMixin
|
||||||
|
|
||||||
from index.html import snapshot_icons
|
from index.html import snapshot_icons
|
||||||
from util import htmldecode, urldecode, ansi_to_html
|
|
||||||
from logging_util import printable_filesize
|
from logging_util import printable_filesize
|
||||||
from main import add, remove
|
from main import add, remove
|
||||||
from config import OUTPUT_DIR
|
from config import OUTPUT_DIR
|
||||||
|
|
|
@ -22,10 +22,32 @@ class AddLinkForm(forms.Form):
|
||||||
url = forms.RegexField(label="URLs (one per line)", regex=URL_REGEX, min_length='6', strip=True, widget=forms.Textarea, required=True)
|
url = forms.RegexField(label="URLs (one per line)", regex=URL_REGEX, min_length='6', strip=True, widget=forms.Textarea, required=True)
|
||||||
depth = forms.ChoiceField(label="Archive depth", choices=CHOICES, widget=forms.RadioSelect, initial='0')
|
depth = forms.ChoiceField(label="Archive depth", choices=CHOICES, widget=forms.RadioSelect, initial='0')
|
||||||
archive_methods = forms.MultipleChoiceField(
|
archive_methods = forms.MultipleChoiceField(
|
||||||
|
label="Archive methods (select at least 1, otherwise all will be used by default)",
|
||||||
required=False,
|
required=False,
|
||||||
widget=forms.SelectMultiple,
|
widget=forms.SelectMultiple,
|
||||||
choices=ARCHIVE_METHODS,
|
choices=ARCHIVE_METHODS,
|
||||||
)
|
)
|
||||||
|
# TODO: hook these up to the view and put them
|
||||||
|
# in a collapsible UI section labeled "Advanced"
|
||||||
|
#
|
||||||
|
# exclude_patterns = forms.CharField(
|
||||||
|
# label="Exclude patterns",
|
||||||
|
# min_length='1',
|
||||||
|
# required=False,
|
||||||
|
# initial=URL_BLACKLIST,
|
||||||
|
# )
|
||||||
|
# timeout = forms.IntegerField(
|
||||||
|
# initial=TIMEOUT,
|
||||||
|
# )
|
||||||
|
# overwrite = forms.BooleanField(
|
||||||
|
# label="Overwrite any existing Snapshots",
|
||||||
|
# initial=False,
|
||||||
|
# )
|
||||||
|
# index_only = forms.BooleanField(
|
||||||
|
# label="Add URLs to index without Snapshotting",
|
||||||
|
# initial=False,
|
||||||
|
# )
|
||||||
|
|
||||||
class TagWidgetMixin:
|
class TagWidgetMixin:
|
||||||
def format_value(self, value):
|
def format_value(self, value):
|
||||||
if value is not None and not isinstance(value, str):
|
if value is not None and not isinstance(value, str):
|
||||||
|
|
|
@ -36,7 +36,7 @@ def forwards_func(apps, schema_editor):
|
||||||
|
|
||||||
for extractor in history:
|
for extractor in history:
|
||||||
for result in history[extractor]:
|
for result in history[extractor]:
|
||||||
ArchiveResult.objects.create(extractor=extractor, snapshot=snapshot, cmd=result["cmd"], cmd_version=result["cmd_version"],
|
ArchiveResult.objects.create(extractor=extractor, snapshot=snapshot, cmd=result["cmd"], cmd_version=result["cmd_version"] or 'unknown',
|
||||||
start_ts=result["start_ts"], end_ts=result["end_ts"], status=result["status"], pwd=result["pwd"], output=result["output"])
|
start_ts=result["start_ts"], end_ts=result["end_ts"], status=result["status"], pwd=result["pwd"], output=result["output"])
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -101,7 +101,7 @@ TEMPLATES = [
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
||||||
DATABASE_FILE = Path(OUTPUT_DIR) / SQL_INDEX_FILENAME
|
DATABASE_FILE = Path(OUTPUT_DIR) / SQL_INDEX_FILENAME
|
||||||
DATABASE_NAME = os.environ.get("ARCHIVEBOX_DATABASE_NAME", DATABASE_FILE)
|
DATABASE_NAME = os.environ.get("ARCHIVEBOX_DATABASE_NAME", str(DATABASE_FILE))
|
||||||
|
|
||||||
DATABASES = {
|
DATABASES = {
|
||||||
'default': {
|
'default': {
|
||||||
|
|
|
@ -9,6 +9,7 @@ from django.http import HttpResponse
|
||||||
from django.views import View, static
|
from django.views import View, static
|
||||||
from django.views.generic.list import ListView
|
from django.views.generic.list import ListView
|
||||||
from django.views.generic import FormView
|
from django.views.generic import FormView
|
||||||
|
from django.db.models import Q
|
||||||
from django.contrib.auth.mixins import UserPassesTestMixin
|
from django.contrib.auth.mixins import UserPassesTestMixin
|
||||||
|
|
||||||
from core.models import Snapshot
|
from core.models import Snapshot
|
||||||
|
@ -108,7 +109,7 @@ class PublicArchiveView(ListView):
|
||||||
qs = super().get_queryset(**kwargs)
|
qs = super().get_queryset(**kwargs)
|
||||||
query = self.request.GET.get('q')
|
query = self.request.GET.get('q')
|
||||||
if query:
|
if query:
|
||||||
qs = qs.filter(title__icontains=query)
|
qs = qs.filter(Q(title__icontains=query) | Q(url__icontains=query) | Q(timestamp__icontains=query) | Q(tags__name__icontains=query))
|
||||||
for snapshot in qs:
|
for snapshot in qs:
|
||||||
snapshot.icons = snapshot_icons(snapshot)
|
snapshot.icons = snapshot_icons(snapshot)
|
||||||
return qs
|
return qs
|
||||||
|
|
|
@ -96,7 +96,7 @@ def archive_snapshot(snapshot: Model, overwrite: bool=False, methods: Optional[I
|
||||||
if method_name not in details["history"]:
|
if method_name not in details["history"]:
|
||||||
details["history"][method_name] = []
|
details["history"][method_name] = []
|
||||||
|
|
||||||
if should_run(snapshot, out_dir) or overwrite:
|
if should_run(snapshot, out_dir, overwrite):
|
||||||
log_archive_method_started(method_name)
|
log_archive_method_started(method_name)
|
||||||
|
|
||||||
result = method_function(snapshot=snapshot, out_dir=out_dir)
|
result = method_function(snapshot=snapshot, out_dir=out_dir)
|
||||||
|
|
|
@ -25,14 +25,17 @@ from ..config import (
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
|
# output = '{domain}/'
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_archive_dot_org(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_archive_dot_org(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if (out_dir / "archive.org.txt").exists():
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
|
if not overwrite and (out_dir / 'archive.org.txt').exists():
|
||||||
# if open(path, 'r').read().strip() != 'None':
|
# if open(path, 'r').read().strip() != 'None':
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
|
@ -20,18 +20,21 @@ from ..config import (
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
|
# output = 'output.html'
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_dom(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_dom(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if (out_dir / 'output.html').exists():
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
|
if not overwrite and (out_dir / 'output.html').exists():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return SAVE_DOM
|
return SAVE_DOM
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def save_dom(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOUT) -> ArchiveResult:
|
def save_dom(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOUT) -> ArchiveResult:
|
||||||
"""print HTML of site to file using chrome --dump-html"""
|
"""print HTML of site to file using chrome --dump-html"""
|
||||||
|
|
|
@ -21,14 +21,17 @@ from ..config import (
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
|
# output = 'favicon.ico'
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_favicon(snapshot: Model, out_dir: Optional[str]=None) -> bool:
|
def should_save_favicon(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool:
|
||||||
out_dir = out_dir or snapshot.snapshot_dir
|
out_dir = out_dir or snapshot.snapshot_dir
|
||||||
if (Path(out_dir) / 'favicon.ico').exists():
|
if not overwrite and (Path(out_dir) / 'favicon.ico').exists():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return SAVE_FAVICON
|
return SAVE_FAVICON
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def save_favicon(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOUT) -> ArchiveResult:
|
def save_favicon(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOUT) -> ArchiveResult:
|
||||||
"""download site favicon from google's favicon api"""
|
"""download site favicon from google's favicon api"""
|
||||||
|
|
|
@ -28,14 +28,20 @@ from ..config import (
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
|
# output = 'git/'
|
||||||
|
# @contents = output.glob('*.*')
|
||||||
|
# @exists = self.contents.exists()
|
||||||
|
# @size => get_size(self.contents)
|
||||||
|
# @num_files => len(self.contents)
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_git(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_git(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
out_dir = out_dir or snapshot.snapshot_dir
|
out_dir = out_dir or snapshot.snapshot_dir
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if (out_dir / "git").exists():
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
|
if not overwrite and (out_dir / 'git').exists():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
is_clonable_url = (
|
is_clonable_url = (
|
||||||
|
|
|
@ -23,12 +23,21 @@ from ..config import (
|
||||||
)
|
)
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
|
# output = 'headers.json'
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_headers(snapshot: Model, out_dir: Optional[str]=None) -> bool:
|
def should_save_headers(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool:
|
||||||
out_dir = out_dir or snapshot.snapshot_dir
|
out_dir = out_dir or snapshot.snapshot_dir
|
||||||
|
|
||||||
|
if not SAVE_HEADERS:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if overwrite:
|
||||||
|
return True
|
||||||
|
|
||||||
output = Path(out_dir or snapshot.snapshot_dir) / 'headers.json'
|
output = Path(out_dir or snapshot.snapshot_dir) / 'headers.json'
|
||||||
return not output.exists() and SAVE_HEADERS
|
return not output.exists()
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
|
|
|
@ -22,14 +22,17 @@ from ..config import (
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
|
# output = 'media/'
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_media(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_media(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
out_dir = out_dir or snapshot.snapshot_dir
|
out_dir = out_dir or snapshot.snapshot_dir
|
||||||
|
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if (out_dir / "media").exists():
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
|
if not overwrite and (out_dir / 'media').exists():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return SAVE_MEDIA
|
return SAVE_MEDIA
|
||||||
|
|
|
@ -39,13 +39,16 @@ def ShellError(cmd: List[str], result: CompletedProcess, lines: int=20) -> Archi
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_mercury(snapshot: Model, out_dir: Optional[str]=None) -> bool:
|
def should_save_mercury(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool:
|
||||||
out_dir = out_dir or snapshot.snapshot_dir
|
out_dir = out_dir or snapshot.snapshot_dir
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
output = Path(out_dir or snapshot.snapshot_dir) / 'mercury'
|
output = Path(out_dir or snapshot.snapshot_dir) / 'mercury'
|
||||||
return SAVE_MERCURY and MERCURY_VERSION and (not output.exists())
|
if not overwrite and output.exists():
|
||||||
|
return False
|
||||||
|
|
||||||
|
return SAVE_MERCURY and MERCURY_VERSION
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
|
|
|
@ -19,14 +19,16 @@ from ..config import (
|
||||||
)
|
)
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
# output = 'output.pdf'
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_pdf(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_pdf(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if (out_dir / "output.pdf").exists():
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
|
if not overwrite and (out_dir / 'output.pdf').exists():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return SAVE_PDF
|
return SAVE_PDF
|
||||||
|
|
|
@ -25,6 +25,7 @@ from ..config import (
|
||||||
)
|
)
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def get_html(snapshot: Model, path: Path) -> str:
|
def get_html(snapshot: Model, path: Path) -> str:
|
||||||
"""
|
"""
|
||||||
|
@ -47,14 +48,20 @@ def get_html(snapshot: Model, path: Path) -> str:
|
||||||
else:
|
else:
|
||||||
return document
|
return document
|
||||||
|
|
||||||
|
|
||||||
|
# output = 'readability/'
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_readability(snapshot: Model, out_dir: Optional[str]=None) -> bool:
|
def should_save_readability(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool:
|
||||||
out_dir = out_dir or snapshot.link_dir
|
out_dir = out_dir or snapshot.link_dir
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
output = Path(out_dir or snapshot.snapshot_dir) / 'readability'
|
output = Path(out_dir or snapshot.snapshot_dir) / 'readability'
|
||||||
return SAVE_READABILITY and READABILITY_VERSION and (not output.exists())
|
if not overwrite and output.exists():
|
||||||
|
return False
|
||||||
|
|
||||||
|
return SAVE_READABILITY and READABILITY_VERSION
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
|
|
|
@ -20,14 +20,16 @@ from ..config import (
|
||||||
from ..logging_util import TimedProgress
|
from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
|
# output = 'screenshot.png'
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_screenshot(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_screenshot(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if (out_dir / "screenshot.png").exists():
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
|
if not overwrite and (out_dir / 'screenshot.png').exists():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return SAVE_SCREENSHOT
|
return SAVE_SCREENSHOT
|
||||||
|
|
|
@ -25,13 +25,16 @@ from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_singlefile(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_singlefile(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
output = out_dir / 'singlefile.html'
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
return SAVE_SINGLEFILE and SINGLEFILE_VERSION and (not output.exists())
|
if not overwrite and (out_dir / 'singlefile.html').exists():
|
||||||
|
return False
|
||||||
|
|
||||||
|
return SAVE_SINGLEFILE
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
|
|
|
@ -62,13 +62,15 @@ class TitleParser(HTMLParser):
|
||||||
self.inside_title_tag = False
|
self.inside_title_tag = False
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
# output = '{title}'
|
||||||
def should_save_title(snapshot: Model, out_dir: Optional[str]=None) -> bool:
|
|
||||||
# if link already has valid title, skip it
|
|
||||||
if snapshot.title and not snapshot.title.lower().startswith('http'):
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
@enforce_types
|
||||||
|
def should_save_title(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool:
|
||||||
if is_static_file(snapshot.url):
|
if is_static_file(snapshot.url):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# if snapshot already has valid title, skip it
|
||||||
|
if not overwrite and snapshot.title and not snapshot.title.lower().startswith('http'):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return SAVE_TITLE
|
return SAVE_TITLE
|
||||||
|
|
|
@ -38,10 +38,10 @@ from ..logging_util import TimedProgress
|
||||||
|
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def should_save_wget(snapshot: Model, out_dir: Optional[Path]=None) -> bool:
|
def should_save_wget(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool:
|
||||||
output_path = wget_output_path(snapshot)
|
output_path = wget_output_path(snapshot)
|
||||||
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
out_dir = out_dir or Path(snapshot.snapshot_dir)
|
||||||
if output_path and (out_dir / output_path).exists():
|
if not overwrite and output_path and (out_dir / output_path).exists():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return SAVE_WGET
|
return SAVE_WGET
|
||||||
|
@ -68,7 +68,7 @@ def save_wget(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOU
|
||||||
*(['--warc-file={}'.format(str(warc_path))] if SAVE_WARC else []),
|
*(['--warc-file={}'.format(str(warc_path))] if SAVE_WARC else []),
|
||||||
*(['--page-requisites'] if SAVE_WGET_REQUISITES else []),
|
*(['--page-requisites'] if SAVE_WGET_REQUISITES else []),
|
||||||
*(['--user-agent={}'.format(WGET_USER_AGENT)] if WGET_USER_AGENT else []),
|
*(['--user-agent={}'.format(WGET_USER_AGENT)] if WGET_USER_AGENT else []),
|
||||||
*(['--load-cookies', COOKIES_FILE] if COOKIES_FILE else []),
|
*(['--load-cookies', str(COOKIES_FILE)] if COOKIES_FILE else []),
|
||||||
*(['--compression=auto'] if WGET_AUTO_COMPRESSION else []),
|
*(['--compression=auto'] if WGET_AUTO_COMPRESSION else []),
|
||||||
*([] if SAVE_WARC else ['--timestamping']),
|
*([] if SAVE_WARC else ['--timestamping']),
|
||||||
*([] if CHECK_SSL_VALIDITY else ['--no-check-certificate', '--no-hsts']),
|
*([] if CHECK_SSL_VALIDITY else ['--no-check-certificate', '--no-hsts']),
|
||||||
|
@ -177,11 +177,22 @@ def wget_output_path(snapshot: Model) -> Optional[str]:
|
||||||
if html_files:
|
if html_files:
|
||||||
return str(html_files[0].relative_to(snapshot.snapshot_dir))
|
return str(html_files[0].relative_to(snapshot.snapshot_dir))
|
||||||
|
|
||||||
|
# sometimes wget'd URLs have no ext and return non-html
|
||||||
|
# e.g. /some/example/rss/all -> some RSS XML content)
|
||||||
|
# /some/other/url.o4g -> some binary unrecognized ext)
|
||||||
|
# test this with archivebox add --depth=1 https://getpocket.com/users/nikisweeting/feed/all
|
||||||
|
last_part_of_url = urldecode(full_path.rsplit('/', 1)[-1])
|
||||||
|
for file_present in search_dir.iterdir():
|
||||||
|
if file_present.name == last_part_of_url:
|
||||||
|
return str(file_present)
|
||||||
|
|
||||||
# Move up one directory level
|
# Move up one directory level
|
||||||
search_dir = search_dir.parent
|
search_dir = search_dir.parent
|
||||||
|
|
||||||
if search_dir == snapshot.snapshot_dir:
|
if search_dir == snapshot.snapshot_dir:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
search_dir = Path(snapshot.snapshot_dir) / domain(snapshot.url).replace(":", "+") / urldecode(full_path)
|
search_dir = Path(snapshot.snapshot_dir) / domain(snapshot.url).replace(":", "+") / urldecode(full_path)
|
||||||
if not search_dir.is_dir():
|
if not search_dir.is_dir():
|
||||||
|
|
|
@ -4,8 +4,8 @@ from datetime import datetime
|
||||||
from typing import List, Optional, Iterator, Mapping
|
from typing import List, Optional, Iterator, Mapping
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from django.utils.html import format_html
|
|
||||||
from django.db.models import Model
|
from django.db.models import Model
|
||||||
|
from django.utils.html import format_html, mark_safe
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
from .schema import Link
|
from .schema import Link
|
||||||
|
@ -119,7 +119,7 @@ def snapshot_icons(snapshot) -> str:
|
||||||
path = snapshot.archive_path
|
path = snapshot.archive_path
|
||||||
canon = snapshot.canonical_outputs()
|
canon = snapshot.canonical_outputs()
|
||||||
output = ""
|
output = ""
|
||||||
output_template = '<a href="/{}/{}" class="exists-{}" title="{}">{} </a>'
|
output_template = '<a href="/{}/{}" class="exists-{}" title="{}">{}</a> '
|
||||||
icons = {
|
icons = {
|
||||||
"singlefile": "❶",
|
"singlefile": "❶",
|
||||||
"wget": "🆆",
|
"wget": "🆆",
|
||||||
|
@ -145,12 +145,12 @@ def snapshot_icons(snapshot) -> str:
|
||||||
for extractor, _ in EXTRACTORS:
|
for extractor, _ in EXTRACTORS:
|
||||||
if extractor not in exclude:
|
if extractor not in exclude:
|
||||||
exists = extractor_items[extractor] is not None
|
exists = extractor_items[extractor] is not None
|
||||||
output += output_template.format(path, canon[f"{extractor}_path"], str(exists),
|
output += format_html(output_template, path, canon[f"{extractor}_path"], str(exists),
|
||||||
extractor, icons.get(extractor, "?"))
|
extractor, icons.get(extractor, "?"))
|
||||||
if extractor == "wget":
|
if extractor == "wget":
|
||||||
# warc isn't technically it's own extractor, so we have to add it after wget
|
# warc isn't technically it's own extractor, so we have to add it after wget
|
||||||
exists = list((Path(path) / canon["warc_path"]).glob("*.warc.gz"))
|
exists = list((Path(path) / canon["warc_path"]).glob("*.warc.gz"))
|
||||||
output += output_template.format(exists[0] if exists else '#', canon["warc_path"], str(bool(exists)), "warc", icons.get("warc", "?"))
|
output += format_html(output_template, exists[0] if exists else '#', canon["warc_path"], str(bool(exists)), "warc", icons.get("warc", "?"))
|
||||||
|
|
||||||
if extractor == "archive_org":
|
if extractor == "archive_org":
|
||||||
# The check for archive_org is different, so it has to be handled separately
|
# The check for archive_org is different, so it has to be handled separately
|
||||||
|
@ -159,4 +159,4 @@ def snapshot_icons(snapshot) -> str:
|
||||||
output += '<a href="{}" class="exists-{}" title="{}">{}</a> '.format(canon["archive_org_path"], str(exists),
|
output += '<a href="{}" class="exists-{}" title="{}">{}</a> '.format(canon["archive_org_path"], str(exists),
|
||||||
"archive_org", icons.get("archive_org", "?"))
|
"archive_org", icons.get("archive_org", "?"))
|
||||||
|
|
||||||
return format_html(f'<span class="files-icons" style="font-size: 1.1em; opacity: 0.8">{output}<span>')
|
return format_html('<span class="files-icons" style="font-size: 1.1em; opacity: 0.8; min-width: 240px; display: inline-block">{}</span>', mark_safe(output))
|
||||||
|
|
|
@ -1070,6 +1070,7 @@ def server(runserver_args: Optional[List[str]]=None,
|
||||||
reload: bool=False,
|
reload: bool=False,
|
||||||
debug: bool=False,
|
debug: bool=False,
|
||||||
init: bool=False,
|
init: bool=False,
|
||||||
|
createsuperuser: bool=False,
|
||||||
out_dir: Path=OUTPUT_DIR) -> None:
|
out_dir: Path=OUTPUT_DIR) -> None:
|
||||||
"""Run the ArchiveBox HTTP server"""
|
"""Run the ArchiveBox HTTP server"""
|
||||||
|
|
||||||
|
@ -1078,6 +1079,9 @@ def server(runserver_args: Optional[List[str]]=None,
|
||||||
if init:
|
if init:
|
||||||
run_subcommand('init', stdin=None, pwd=out_dir)
|
run_subcommand('init', stdin=None, pwd=out_dir)
|
||||||
|
|
||||||
|
if createsuperuser:
|
||||||
|
run_subcommand('manage', subcommand_args=['createsuperuser'], pwd=out_dir)
|
||||||
|
|
||||||
# setup config for django runserver
|
# setup config for django runserver
|
||||||
from . import config
|
from . import config
|
||||||
config.SHOW_PROGRESS = False
|
config.SHOW_PROGRESS = False
|
||||||
|
|
|
@ -5,7 +5,7 @@ from sonic import IngestClient, SearchClient
|
||||||
from archivebox.util import enforce_types
|
from archivebox.util import enforce_types
|
||||||
from archivebox.config import SEARCH_BACKEND_HOST_NAME, SEARCH_BACKEND_PORT, SEARCH_BACKEND_PASSWORD, SONIC_BUCKET, SONIC_COLLECTION
|
from archivebox.config import SEARCH_BACKEND_HOST_NAME, SEARCH_BACKEND_PORT, SEARCH_BACKEND_PASSWORD, SONIC_BUCKET, SONIC_COLLECTION
|
||||||
|
|
||||||
MAX_SONIC_TEXT_LENGTH = 20000
|
MAX_SONIC_TEXT_LENGTH = 2000
|
||||||
|
|
||||||
@enforce_types
|
@enforce_types
|
||||||
def index(snapshot_id: str, texts: List[str]):
|
def index(snapshot_id: str, texts: List[str]):
|
||||||
|
|
|
@ -68,4 +68,6 @@
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block footer %}{% endblock %}
|
||||||
|
|
||||||
{% block sidebar %}{% endblock %}
|
{% block sidebar %}{% endblock %}
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
{% load admin_urls %}
|
||||||
{% load static %}
|
{% load static %}
|
||||||
|
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
|
@ -7,222 +8,8 @@
|
||||||
<title>Archived Sites</title>
|
<title>Archived Sites</title>
|
||||||
<meta charset="utf-8" name="viewport" content="width=device-width, initial-scale=1">
|
<meta charset="utf-8" name="viewport" content="width=device-width, initial-scale=1">
|
||||||
|
|
||||||
<style>
|
<link rel="stylesheet" href="{% static 'admin/css/base.css' %}">
|
||||||
:root {
|
<link rel="stylesheet" type="text/css" href="{% static 'admin.css' %}">
|
||||||
--bg-main: #efefef;
|
|
||||||
--accent-1: #aa1e55;
|
|
||||||
--accent-2: #ffebeb;
|
|
||||||
--accent-3: #efefef;
|
|
||||||
|
|
||||||
--text-1: #1c1c1c;
|
|
||||||
--text-2: #eaeaea;
|
|
||||||
--text-main: #1a1a1a;
|
|
||||||
--font-main: "Gill Sans", Helvetica, sans-serif;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Dark Mode (WIP) */
|
|
||||||
/*
|
|
||||||
@media (prefers-color-scheme: dark) {
|
|
||||||
:root {
|
|
||||||
--accent-2: hsl(160, 100%, 96%);
|
|
||||||
|
|
||||||
--text-1: #eaeaea;
|
|
||||||
--text-2: #1a1a1a;
|
|
||||||
--bg-main: #101010;
|
|
||||||
}
|
|
||||||
|
|
||||||
#table-bookmarks_wrapper,
|
|
||||||
#table-bookmarks_wrapper img,
|
|
||||||
tbody td:nth-child(3),
|
|
||||||
tbody td:nth-child(3) span,
|
|
||||||
footer {
|
|
||||||
filter: invert(100%);
|
|
||||||
}
|
|
||||||
}*/
|
|
||||||
|
|
||||||
html,
|
|
||||||
body {
|
|
||||||
width: 100%;
|
|
||||||
height: 100%;
|
|
||||||
font-size: 18px;
|
|
||||||
font-weight: 200;
|
|
||||||
text-align: center;
|
|
||||||
margin: 0px;
|
|
||||||
padding: 0px;
|
|
||||||
font-family: var(--font-main);
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-top small {
|
|
||||||
font-weight: 200;
|
|
||||||
color: var(--accent-3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-top {
|
|
||||||
width: 100%;
|
|
||||||
height: auto;
|
|
||||||
min-height: 40px;
|
|
||||||
margin: 0px;
|
|
||||||
text-align: center;
|
|
||||||
color: white;
|
|
||||||
font-size: calc(11px + 0.84vw);
|
|
||||||
font-weight: 200;
|
|
||||||
padding: 4px 4px;
|
|
||||||
border-bottom: 3px solid var(--accent-1);
|
|
||||||
background-color: var(--accent-1);
|
|
||||||
}
|
|
||||||
|
|
||||||
input[type=search] {
|
|
||||||
width: 22vw;
|
|
||||||
border-radius: 4px;
|
|
||||||
border: 1px solid #aeaeae;
|
|
||||||
padding: 3px 5px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.nav>div {
|
|
||||||
min-height: 30px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-top a {
|
|
||||||
text-decoration: none;
|
|
||||||
color: rgba(0, 0, 0, 0.6);
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-top a:hover {
|
|
||||||
text-decoration: none;
|
|
||||||
color: rgba(0, 0, 0, 0.9);
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-top .col-lg-4 {
|
|
||||||
text-align: center;
|
|
||||||
padding-top: 4px;
|
|
||||||
padding-bottom: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-archivebox img {
|
|
||||||
display: inline-block;
|
|
||||||
margin-right: 3px;
|
|
||||||
height: 30px;
|
|
||||||
margin-left: 12px;
|
|
||||||
margin-top: -4px;
|
|
||||||
margin-bottom: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-archivebox img:hover {
|
|
||||||
opacity: 0.5;
|
|
||||||
}
|
|
||||||
|
|
||||||
#table-bookmarks_length,
|
|
||||||
#table-bookmarks_filter {
|
|
||||||
padding-top: 12px;
|
|
||||||
opacity: 0.8;
|
|
||||||
padding-left: 24px;
|
|
||||||
padding-right: 22px;
|
|
||||||
margin-bottom: -16px;
|
|
||||||
}
|
|
||||||
|
|
||||||
table {
|
|
||||||
padding: 6px;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
table thead th {
|
|
||||||
font-weight: 400;
|
|
||||||
}
|
|
||||||
|
|
||||||
table tr {
|
|
||||||
height: 35px;
|
|
||||||
}
|
|
||||||
|
|
||||||
tbody tr:nth-child(odd) {
|
|
||||||
background-color: var(--accent-2) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
table tr td {
|
|
||||||
white-space: nowrap;
|
|
||||||
overflow: hidden;
|
|
||||||
/*padding-bottom: 0.4em;*/
|
|
||||||
/*padding-top: 0.4em;*/
|
|
||||||
padding-left: 2px;
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
table tr td a {
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
table tr td img,
|
|
||||||
table tr td object {
|
|
||||||
display: inline-block;
|
|
||||||
margin: auto;
|
|
||||||
height: 24px;
|
|
||||||
width: 24px;
|
|
||||||
padding: 0px;
|
|
||||||
padding-right: 5px;
|
|
||||||
vertical-align: middle;
|
|
||||||
margin-left: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#table-bookmarks {
|
|
||||||
width: 100%;
|
|
||||||
overflow-y: scroll;
|
|
||||||
table-layout: fixed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dataTables_wrapper {
|
|
||||||
background-color: #fafafa;
|
|
||||||
}
|
|
||||||
|
|
||||||
table tr a span[data-archived~=False] {
|
|
||||||
opacity: 0.4;
|
|
||||||
}
|
|
||||||
|
|
||||||
.files-spinner {
|
|
||||||
height: 15px;
|
|
||||||
width: auto;
|
|
||||||
opacity: 0.5;
|
|
||||||
vertical-align: -2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.in-progress {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
tr td a.favicon img {
|
|
||||||
padding-left: 6px;
|
|
||||||
padding-right: 12px;
|
|
||||||
vertical-align: -4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
tr td a.title {
|
|
||||||
font-size: 1.4em;
|
|
||||||
text-decoration: none;
|
|
||||||
color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
tr td a.title small {
|
|
||||||
background-color: var(--accent-3);
|
|
||||||
border-radius: 4px;
|
|
||||||
float: right
|
|
||||||
}
|
|
||||||
|
|
||||||
input[type=search]::-webkit-search-cancel-button {
|
|
||||||
-webkit-appearance: searchfield-cancel-button;
|
|
||||||
}
|
|
||||||
|
|
||||||
.title-col {
|
|
||||||
text-align: left;
|
|
||||||
}
|
|
||||||
|
|
||||||
.title-col a {
|
|
||||||
color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
.exists-False {
|
|
||||||
opacity: 0.1;
|
|
||||||
filter: grayscale(100%);
|
|
||||||
pointer-events: none;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<link rel="stylesheet" href="{% static 'bootstrap.min.css' %}">
|
<link rel="stylesheet" href="{% static 'bootstrap.min.css' %}">
|
||||||
<link rel="stylesheet" href="{% static 'jquery.dataTables.min.css' %}" />
|
<link rel="stylesheet" href="{% static 'jquery.dataTables.min.css' %}" />
|
||||||
{% block extra_head %}
|
{% block extra_head %}
|
||||||
|
@ -247,38 +34,51 @@
|
||||||
<base href="{% url 'Home' %}">
|
<base href="{% url 'Home' %}">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<header>
|
<div id="container">
|
||||||
<div class="header-top container-fluid">
|
<div id="header">
|
||||||
<div class="row nav">
|
<div id="branding">
|
||||||
<div class="col-sm-2">
|
<h1 id="site-name">
|
||||||
<a href="{% url 'public-index' %}" class="header-archivebox" title="Last updated: {{updated}}">
|
<a href="{% url 'public-index' %}" class="header-archivebox" title="Last updated: {{updated}}">
|
||||||
<img src="{% static 'archive.png' %}" alt="Logo" />
|
<img src="{% static 'archive.png' %}" alt="Logo" style="height: 30px"/>
|
||||||
ArchiveBox: Index
|
ArchiveBox
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</h1>
|
||||||
<div class="col-sm-10" style="text-align: right">
|
</div>
|
||||||
<a href="/add/">Add Links</a> |
|
<div id="user-tools">
|
||||||
<a href="/admin/core/snapshot/">Admin</a> |
|
<a href="/add/">➕ Add</a> /
|
||||||
<a href="https://github.com/ArchiveBox/ArchiveBox/wiki">Docs</a>
|
<a href="/">Snapshots</a> /
|
||||||
</div>
|
<a href="/admin/">Admin</a> /
|
||||||
|
<a href="https://github.com/ArchiveBox/ArchiveBox/wiki">Docs</a>
|
||||||
|
{% if user.is_authenticated %}
|
||||||
|
|
||||||
|
User
|
||||||
|
<strong>{% firstof user.get_short_name user.get_username %}</strong>
|
||||||
|
{% if user.has_usable_password %}
|
||||||
|
<a href="{% url 'admin:password_change' %}">Change password</a> /
|
||||||
|
{% endif %}
|
||||||
|
<a href="{% url 'admin:logout' %}">Log out</a>
|
||||||
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</header>
|
<div id="content" class="flex">
|
||||||
{% block body %}
|
{% block body %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
<br>
|
</div>
|
||||||
<footer>
|
{% block footer %}
|
||||||
<br />
|
<footer>
|
||||||
<center>
|
<br />
|
||||||
<small>
|
<center>
|
||||||
Archive created using <a href="https://github.com/ArchiveBox/ArchiveBox" title="Github">ArchiveBox</a> version
|
<small>
|
||||||
<a href="https://github.com/ArchiveBox/ArchiveBox/releases" title="Releases">v{{VERSION}}</a>.
|
Archive created using <a href="https://github.com/ArchiveBox/ArchiveBox" title="Github">ArchiveBox</a> version
|
||||||
<br/><br/>
|
<a href="https://github.com/ArchiveBox/ArchiveBox/releases" title="Releases">v{{VERSION}}</a>.
|
||||||
{{FOOTER_INFO}}
|
<br/><br/>
|
||||||
</small>
|
{{FOOTER_INFO}}
|
||||||
</center>
|
</small>
|
||||||
<br />
|
</center>
|
||||||
</footer>
|
<br />
|
||||||
|
</footer>
|
||||||
|
{% endblock %}
|
||||||
|
</div>
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
|
|
|
@ -2,13 +2,21 @@
|
||||||
{% load static %}
|
{% load static %}
|
||||||
|
|
||||||
{% block body %}
|
{% block body %}
|
||||||
<br>
|
<div id="toolbar">
|
||||||
<form action="{% url 'public-index' %}" method="get">
|
<form id="changelist-search" action="{% url 'public-index' %}" method="get">
|
||||||
<input name="q" type="text" placeholder="Search...">
|
<div>
|
||||||
<button type="submit">Search</button>
|
<label for="searchbar"><img src="/static/admin/img/search.svg" alt="Search"></label>
|
||||||
<button onclick="location.href='{% url 'public-index' %}'" type="button">
|
<input type="text" size="40" name="q" value="" id="searchbar" autofocus placeholder="Title, URL, tags, timestamp, or content...".>
|
||||||
Reload Index</button>
|
<input type="submit" value="Search" style="height: 36px; padding-top: 6px; margin: 8px"/>
|
||||||
</form>
|
<input type="button"
|
||||||
|
value="♺"
|
||||||
|
title="Refresh..."
|
||||||
|
onclick="location.href='{% url 'public-index' %}'"
|
||||||
|
style="background-color: rgba(121, 174, 200, 0.8); height: 30px; font-size: 0.8em; margin-top: 12px; padding-top: 6px; float:right">
|
||||||
|
</input>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
<table id="table-bookmarks">
|
<table id="table-bookmarks">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
|
|
|
@ -243,7 +243,7 @@
|
||||||
<center>
|
<center>
|
||||||
<small>
|
<small>
|
||||||
Archive created using <a href="https://github.com/ArchiveBox/ArchiveBox" title="Github">ArchiveBox</a>
|
Archive created using <a href="https://github.com/ArchiveBox/ArchiveBox" title="Github">ArchiveBox</a>
|
||||||
version <a href="https://github.com/ArchiveBox/ArchiveBox/tree/v{{VERSION}}" title="Git commit">v{{VERSION}}</a> |
|
version <a href="https://github.com/ArchiveBox/ArchiveBox/tree/v{{version}}" title="Git commit">v{{version}}</a> |
|
||||||
Download index as <a href="index.json" title="JSON summary of archived links.">JSON</a>
|
Download index as <a href="index.json" title="JSON summary of archived links.">JSON</a>
|
||||||
<br/><br/>
|
<br/><br/>
|
||||||
{{FOOTER_INFO}}
|
{{FOOTER_INFO}}
|
||||||
|
|
|
@@ -1,12 +1,14 @@
 {% load static %}

 <tr>
+{% comment %}
+<!-- from upstream, may need to merge these two -->
 <td title="{{snapshot.timestamp}}"> {% if snapshot.bookmarked_date %} {{ snapshot.bookmarked_date }} {% else %} {{ snapshot.added }} {% endif %} </td>
 <td class="title-col">
 {% if snapshot.is_archived %}
-<a href="archive/{{snapshot.timestamp}}/index.html"><img src="archive/{{snapshot.timestamp}}/favicon.ico" class="link-favicon" decoding="async"></a>
+<a href="archive/{{snapshot.timestamp}}/index.html"><img src="archive/{{snapshot.timestamp}}/favicon.ico" class="snapshot-favicon" decoding="async"></a>
 {% else %}
-<a href="archive/{{snapshot.timestamp}}/index.html"><img src="{% static 'spinner.gif' %}" class="link-favicon" decoding="async"></a>
+<a href="archive/{{snapshot.timestamp}}/index.html"><img src="{% static 'spinner.gif' %}" class="snapshot-favicon" decoding="async"></a>
 {% endif %}
 <a href="archive/{{snapshot.timestamp}}/{{snapshot.canonical_outputs.wget_path}}" title="{{snapshot.title}}">
 <span data-title-for="{{snapshot.url}}" data-archived="{{snapshot.is_archived}}">{{snapshot.title|default:'Loading...'}}</span>

@@ -19,4 +21,39 @@
 </a>
 </td>
 <td style="text-align:left"><a href="{{snapshot.url}}">{{snapshot.url}}</a></td>
+{% endcomment %}
+
+<td title="{{snapshot.timestamp}}"> {% if snapshot.bookmarked_date %} {{ snapshot.bookmarked_date }} {% else %} {{ snapshot.added }} {% endif %} </td>
+<td class="title-col" style="opacity: {% if snapshot.title %}1{% else %}0.3{% endif %}">
+    {% if snapshot.is_archived %}
+    <a href="archive/{{snapshot.timestamp}}/index.html"><img src="archive/{{snapshot.timestamp}}/favicon.ico" class="snapshot-favicon" decoding="async"></a>
+    {% else %}
+    <a href="archive/{{snapshot.timestamp}}/index.html"><img src="{% static 'spinner.gif' %}" class="snapshot-favicon" decoding="async" style="height: 15px"></a>
+    {% endif %}
+
+    <a href="archive/{{snapshot.timestamp}}/index.html" title="{{snapshot.title|default:'Not yet archived...'}}">
+        <span data-title-for="{{snapshot.url}}" data-archived="{{snapshot.is_archived}}">{{snapshot.title|default:'Loading...'}}</span>
+        {% if snapshot.tags_str %}
+        <span class="tags" style="float: right; border-radius: 5px; background-color: #bfdfff; padding: 2px 5px; margin-left: 4px; margin-top: 1px;">
+            {% if snapshot.tags_str != None %}
+            {{snapshot.tags_str|default:''}}
+            {% else %}
+            {{ snapshot.tags|default:'' }}
+            {% endif %}
+        </span>
+        {% endif %}
+    </a>
+</td>
+<td>
+    <span data-number-for="{{snapshot.url}}" title="Fetching any missing files...">
+        {% if snapshot.icons %}
+        {{snapshot.icons}} <small style="float:right; opacity: 0.5">{{snapshot.num_outputs}}</small>
+        {% else %}
+        <a href="archive/{{snapshot.timestamp}}/index.html">📄
+        {{snapshot.num_outputs}} <img src="{% static 'spinner.gif' %}" class="files-spinner" decoding="async" style="height: 15px"/>
+        </a>
+        {% endif %}
+    </span>
+</td>
+<td style="text-align:left"><a href="{{snapshot.url}}">{{snapshot.url}}</a></td>
 </tr>
@@ -1,3 +1,13 @@
+header {
+    font-family: "Roboto","Lucida Grande","DejaVu Sans","Bitstream Vera Sans",Verdana,Arial,sans-serif;
+    font-size: 13px;
+    color: white;
+    height: 30px;
+}
+.header-top {
+    color: white;
+}
+
 .dashboard #content {
     width: 100%;
     margin-right: 0px;

@@ -60,3 +70,21 @@ ul#id_depth {
     box-sizing: border-box;
     animation: spin 2s linear infinite;
 }
+
+
+textarea, select {
+    border-radius: 4px;
+    border: 2px solid #004882;
+    box-shadow: 4px 4px 4px rgba(0,0,0,0.02);
+    width: 100%;
+}
+
+select option:not(:checked) {
+    border: 1px dashed rgba(10,200,20,0.12);
+}
+select option:checked {
+    border: 1px solid green;
+    background-color: green;
+    color: green;
+}
+
@@ -224,7 +224,7 @@ body.model-snapshot.change-list #content .object-tools {
     100% { transform: rotate(360deg); }
 }

-.tags > a > .tag {
+.tag {
     float: right;
     border-radius: 5px;
     background-color: #bfdfff;

@@ -232,3 +232,8 @@ body.model-snapshot.change-list #content .object-tools {
     margin-left: 4px;
     margin-top: 1px;
 }
+
+.exists-False {
+    opacity: 0.1;
+    filter: grayscale(100%);
+}
@@ -12,6 +12,7 @@ IFS=$'\n'

 REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
 VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
 cd "$REPO_DIR"

 which docker > /dev/null

@@ -20,9 +21,13 @@ echo "[+] Building docker image in the background..."
 docker build . -t archivebox \
     -t archivebox:latest \
     -t archivebox:$VERSION \
+    -t archivebox:$SHORT_VERSION \
     -t docker.io/nikisweeting/archivebox:latest \
     -t docker.io/nikisweeting/archivebox:$VERSION \
+    -t docker.io/nikisweeting/archivebox:$SHORT_VERSION \
     -t docker.io/archivebox/archivebox:latest \
     -t docker.io/archivebox/archivebox:$VERSION \
+    -t docker.io/archivebox/archivebox:$SHORT_VERSION \
     -t docker.pkg.github.com/pirate/archivebox/archivebox:latest \
-    -t docker.pkg.github.com/pirate/archivebox/archivebox:$VERSION
+    -t docker.pkg.github.com/pirate/archivebox/archivebox:$VERSION \
+    -t docker.pkg.github.com/pirate/archivebox/archivebox:$SHORT_VERSION
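The new `SHORT_VERSION` above strips the patch component from the full semver string, so each build also gets a rolling minor-version tag alongside the exact-version tag. A minimal sketch of how the substitution behaves, assuming a hypothetical version of `0.4.21` (not a value taken from this commit):

```bash
# Hypothetical input value for illustration only
VERSION="0.4.21"
SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
echo "$SHORT_VERSION"   # prints "0.4", so images get tagged archivebox:0.4.21 and archivebox:0.4
```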
@@ -20,7 +20,6 @@ fi
 cd "$REPO_DIR"


-
 echo "[*] Fetching latest docs version"
 cd "$REPO_DIR/docs"
 git pull
bin/build_git.sh (new file, 38 lines)
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+
+cd "$REPO_DIR"
+source "./.venv/bin/activate"
+
+
+# Make sure git is clean
+if [ -z "$(git status --porcelain)" ] && [[ "$(git branch --show-current)" == "master" ]]; then
+    git pull
+else
+    echo "[!] Warning: git status is dirty!"
+    echo " Press Ctrl-C to cancel, or wait 10sec to continue..."
+    sleep 10
+fi
+
+# Bump version number in source
+function bump_semver {
+    echo "$1" | awk -F. '{$NF = $NF + 1;} 1' | sed 's/ /./g'
+}
+
+OLD_VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+NEW_VERSION="$(bump_semver "$OLD_VERSION")"
+echo "[*] Bumping VERSION from $OLD_VERSION to $NEW_VERSION"
+contents="$(jq ".version = \"$NEW_VERSION\"" "$REPO_DIR/package.json")" && \
+echo "${contents}" > package.json
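The key piece of this new script is the `bump_semver` helper: awk splits the version string on dots, increments the last field, and sed re-joins the fields with dots. A small sketch of its behavior with a hypothetical version string (the value is an example, not read from package.json):

```bash
# bump_semver as defined in bin/build_git.sh above; the input value is illustrative
bump_semver() {
    echo "$1" | awk -F. '{$NF = $NF + 1;} 1' | sed 's/ /./g'
}
bump_semver "0.4.21"   # prints 0.4.22 (only the patch component is incremented)
```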
@@ -1,24 +1,33 @@
 #!/usr/bin/env bash

-# Autodetect UID,GID of host user based on ownership of files in the data volume
 DATA_DIR="${DATA_DIR:-/data}"
 ARCHIVEBOX_USER="${ARCHIVEBOX_USER:-archivebox}"

-USID=$(stat --format="%u" "$DATA_DIR")
-GRID=$(stat --format="%g" "$DATA_DIR")
-
-# If user is not root, modify the archivebox user+files to have the same uid,gid
-if [[ "$USID" != 0 && "$GRID" != 0 ]]; then
-    usermod -u "$USID" "$ARCHIVEBOX_USER" > /dev/null 2>&1
-    groupmod -g "$GRID" "$ARCHIVEBOX_USER" > /dev/null 2>&1
-    chown -R "$USID":"$GRID" "/home/$ARCHIVEBOX_USER"
-    chown "$USID":"$GRID" "$DATA_DIR"
-    chown "$USID":"$GRID" "$DATA_DIR/*" > /dev/null 2>&1 || true
+# Set the archivebox user UID & GID
+if [[ -n "$PUID" && "$PUID" != 0 ]]; then
+    usermod -u "$PUID" "$ARCHIVEBOX_USER" > /dev/null 2>&1
+fi
+if [[ -n "$PGID" && "$PGID" != 0 ]]; then
+    groupmod -g "$PGID" "$ARCHIVEBOX_USER" > /dev/null 2>&1
 fi

-# Run commands as the new archivebox user in Docker.
-# Any files touched will have the same uid & gid
-# inside Docker and outside on the host machine.
+# Set the permissions of the data dir to match the archivebox user
+if [[ -d "$DATA_DIR/archive" ]]; then
+    # check data directory permissions
+    if [[ ! "$(stat -c %u $DATA_DIR/archive)" = "$(id -u archivebox)" ]]; then
+        echo "Change in ownership detected, please be patient while we chown existing files"
+        echo "This could take some time..."
+        chown $ARCHIVEBOX_USER:$ARCHIVEBOX_USER -R "$DATA_DIR"
+    fi
+else
+    # create data directory
+    mkdir -p "$DATA_DIR"
+    chown -R $ARCHIVEBOX_USER:$ARCHIVEBOX_USER "$DATA_DIR"
+fi
+chown $ARCHIVEBOX_USER:$ARCHIVEBOX_USER "$DATA_DIR"
+
+
+# Drop permissions to run commands as the archivebox user
 if [[ "$1" == /* || "$1" == "echo" || "$1" == "archivebox" ]]; then
     # arg 1 is a binary, execute it verbatim
     # e.g. "archivebox init"
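With this change the entrypoint no longer guesses the UID/GID from the ownership of the mounted data volume; it only remaps the `archivebox` user when explicit `PUID`/`PGID` environment variables are provided, and re-chowns the data dir if its ownership no longer matches. A hedged usage sketch (the image tag, host path, and `init` subcommand are illustrative, not mandated by this diff):

```bash
# Example invocation exercising the new PUID/PGID handling; values are illustrative
docker run -it --rm \
    -e PUID="$(id -u)" \
    -e PGID="$(id -g)" \
    -v "$PWD/data":/data \
    archivebox/archivebox init
```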
@@ -11,69 +11,28 @@ set -o pipefail
 IFS=$'\n'

 REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"

 cd "$REPO_DIR"
-source "./.venv/bin/activate"


-# Make sure git is clean
-if [ -z "$(git status --porcelain)" ] && [[ "$(git branch --show-current)" == "master" ]]; then
-    git pull
-else
-    echo "[!] Warning: git status is dirty!"
-    echo " Press Ctrl-C to cancel, or wait 10sec to continue..."
-    sleep 10
-fi
-
-
-# Bump version number in source
-function bump_semver {
-    echo "$1" | awk -F. '{$NF = $NF + 1;} 1' | sed 's/ /./g'
-}
-
-OLD_VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
-NEW_VERSION="$(bump_semver "$OLD_VERSION")"
-echo "[*] Bumping VERSION from $OLD_VERSION to $NEW_VERSION"
-contents="$(jq ".version = \"$NEW_VERSION\"" "$REPO_DIR/package.json")" && \
-echo "${contents}" > package.json
-
-
-# Build docs, python package, and docker image
+# Run the linters and tests
+# ./bin/lint.sh
+# ./bin/test.sh
+
+
+# Run all the build scripts
+./bin/build_git.sh
 ./bin/build_docs.sh
 ./bin/build_pip.sh
 ./bin/build_deb.sh
+./bin/build_brew.sh
 ./bin/build_docker.sh

-# Push build to github
-echo "[^] Pushing source to github"
-git add "$REPO_DIR/docs"
-git add "$REPO_DIR/deb_dist"
-git add "$REPO_DIR/pip_dist"
-git add "$REPO_DIR/brew_dist"
-git add "$REPO_DIR/package.json"
-git add "$REPO_DIR/package-lock.json"
-git commit -m "$NEW_VERSION release"
-git tag -a "v$NEW_VERSION" -m "v$NEW_VERSION"
-git push origin master
-git push origin --tags
-
-
-# Push releases to github
-echo "[^] Uploading to test.pypi.org"
-python3 -m twine upload --repository testpypi pip_dist/*.{whl,tar.gz}
-
-echo "[^] Uploading to pypi.org"
-python3 -m twine upload --repository pypi pip_dist/*.{whl,tar.gz}
-
-echo "[^] Uploading to launchpad.net"
-dput archivebox "deb_dist/archivebox_${NEW_VERSION}-1_source.changes"
-
-echo "[^] Uploading docker image"
-# docker login --username=nikisweeting
-# docker login docker.pkg.github.com --username=pirate
-docker push docker.io/nikisweeting/archivebox
-docker push docker.io/archivebox/archivebox
-docker push docker.pkg.github.com/archivebox/archivebox/archivebox
-
-echo "[√] Done. Published version v$NEW_VERSION"
+# Push relase to public repositories
+./bin/release_git.sh
+./bin/release_docs.sh
+./bin/release_pip.sh
+./bin/release_deb.sh
+./bin/release_brew.sh
+./bin/release_docker.sh
+
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+echo "[√] Done. Published version v$VERSION"
bin/release_brew.sh (new file, 19 lines)
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
+cd "$REPO_DIR"
+
+# TODO
+exit 0

bin/release_deb.sh (new file, 20 lines)
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
+cd "$REPO_DIR"
+
+
+echo "[^] Uploading to launchpad.net"
+dput archivebox "deb_dist/archivebox_${VERSION}-1_source.changes"

bin/release_docker.sh (new file, 24 lines)
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
+cd "$REPO_DIR"
+
+
+echo "[^] Uploading docker image"
+# docker login --username=nikisweeting
+# docker login docker.pkg.github.com --username=pirate
+docker push docker.io/nikisweeting/archivebox
+docker push docker.io/archivebox/archivebox
+docker push docker.pkg.github.com/archivebox/archivebox/archivebox

bin/release_docs.sh (new file, 25 lines)
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
+cd "$REPO_DIR"
+
+
+echo "[^] Pushing docs to github"
+cd docs/
+git commit -am "$NEW_VERSION release"
+git push
+git tag -a "v$NEW_VERSION" -m "v$NEW_VERSION"
+git push origin master
+git push origin --tags

bin/release_git.sh (new file, 25 lines)
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+cd "$REPO_DIR"
+
+
+# Push build to github
+echo "[^] Pushing release commit + tag to Github"
+git commit -am "$VERSION release"
+git tag -a "v$VERSION" -m "v$VERSION"
+git push origin master
+git push origin --tags
+echo " To finish publishing the release go here:"
+echo " https://github.com/ArchiveBox/ArchiveBox/releases/new"

bin/release_pip.sh (new file, 26 lines)
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+cd "$REPO_DIR"
+
+
+# apt install python3 python3-all python3-dev
+# pip install '.[dev]'
+
+
+echo "[^] Uploading to test.pypi.org"
+python3 -m twine upload --repository testpypi pip_dist/archivebox-${VERSION}*.{whl,tar.gz}
+
+echo "[^] Uploading to pypi.org"
+python3 -m twine upload --repository pypi pip_dist/archivebox-${VERSION}*.{whl,tar.gz}

bin/setup.sh
@@ -1,6 +1,5 @@
-#!/bin/bash
+#!/usr/bin/env bash
 # ArchiveBox Setup Script
-# Nick Sweeting 2017 | MIT License
 # https://github.com/ArchiveBox/ArchiveBox

 echo "[i] ArchiveBox Setup Script 📦"

@@ -8,27 +7,28 @@ echo ""
 echo " This is a helper script which installs the ArchiveBox dependencies on your system using homebrew/aptitude."
 echo " You may be prompted for a password in order to install the following:"
 echo ""
-echo " - git"
 echo " - python3, python3-pip, python3-distutils"
 echo " - curl"
 echo " - wget"
+echo " - git"
 echo " - youtube-dl"
 echo " - chromium-browser (skip this if Chrome/Chromium is already installed)"
+echo " - nodejs (used for singlefile, readability, mercury, and more)"
 echo ""
 echo " If you'd rather install these manually, you can find documentation here:"
 echo " https://github.com/ArchiveBox/ArchiveBox/wiki/Install"
 echo ""
-echo "Press enter to continue with the automatic install, or Ctrl+C to cancel..."
-read
+read -p "Press [enter] to continue with the automatic install, or Ctrl+C to cancel..." REPLY

 echo ""

 # On Linux:
 if which apt-get > /dev/null; then
-    echo "[+] Updating apt repos..."
-    apt update -q
+    echo "[+] Adding ArchiveBox apt repo to sources..."
+    sudo apt install software-properties-common
+    sudo add-apt-repository -u ppa:archivebox/archivebox
     echo "[+] Installing python3, wget, curl..."
-    apt install git python3 python3-pip python3-distutils wget curl youtube-dl
+    sudo apt install -y git python3 python3-pip python3-distutils wget curl youtube-dl nodejs npm ripgrep
+    # sudo apt install archivebox

     if which google-chrome; then
         echo "[i] You already have google-chrome installed, if you would like to download chromium instead (they work pretty much the same), follow the Manual Setup instructions"

@@ -41,13 +41,13 @@ if which apt-get > /dev/null; then
         chromium --version
     else
         echo "[+] Installing chromium..."
-        apt install chromium
+        sudo apt install chromium || sudo apt install chromium-browser
     fi

 # On Mac:
 elif which brew > /dev/null; then # 🐍 eye of newt
     echo "[+] Installing python3, wget, curl (ignore 'already installed' warnings)..."
-    brew install git wget curl youtube-dl
+    brew install git wget curl youtube-dl ripgrep node
     if which python3; then
         if python3 -c 'import sys; raise SystemExit(sys.version_info < (3,5,0))'; then
             echo "[√] Using existing $(which python3)..."

@@ -83,7 +83,11 @@ else
     exit 1
 fi

-python3 -m pip install --upgrade archivebox
+npm i -g npm
+pip3 install --upgrade pip setuptools
+
+pip3 install --upgrade archivebox
+npm install -g 'git+https://github.com/ArchiveBox/ArchiveBox.git'

 # Check:
 echo ""