diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 0d902dca..f78490a6 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -9,15 +9,15 @@ **Useful links:** -- https://github.com/pirate/ArchiveBox/issues -- https://github.com/pirate/ArchiveBox/pulls -- https://github.com/pirate/ArchiveBox/wiki/Roadmap -- https://github.com/pirate/ArchiveBox/wiki/Install#manual-setup +- https://github.com/ArchiveBox/ArchiveBox/issues +- https://github.com/ArchiveBox/ArchiveBox/pulls +- https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap +- https://github.com/ArchiveBox/ArchiveBox/wiki/Install#manual-setup ### Development Setup ```bash -git clone https://github.com/pirate/ArchiveBox +git clone https://github.com/ArchiveBox/ArchiveBox cd ArchiveBox # Ideally do this in a virtualenv pip install -e '.[dev]' # or use: pipenv install --dev @@ -31,6 +31,8 @@ pip install -e '.[dev]' # or use: pipenv install --dev ./bin/build.sh ``` +For more common tasks see the `Development` section at the bottom of the README. + ### Getting Help Open issues on Github or message me https://sweeting.me/#contact. diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index c2bf8b23..220707b9 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -10,7 +10,8 @@ assignees: '' #### Describe the bug @@ -35,9 +36,11 @@ If applicable, post any relevant screenshots or copy/pasted terminal output from If you're reporting a parsing / importing error, **you must paste a copy of your redacted import file here**. --> -#### Software versions +#### ArchiveBox version - - OS: ([e.g. macOS 10.14] the operating system you're running ArchiveBox on) - - ArchiveBox version: (`git rev-parse HEAD | head -c7` [e.g. d798117] commit ID of the version you're running) - - Python version: (`python3 --version` [e.g. 3.7.0]) - - Chrome version: (`chromium-browser --version` [e.g. 73.1.2.3] if relevant to bug) + +```logs +replace this line with the *full*, unshortened output of running `archivebox version` +``` + diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 3361571d..5378139f 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -45,6 +45,6 @@ workarounds, or other software you've considered using to fix the problem. 
--- - - [ ] I'm willing to contribute dev time / money to fix this issue + - [ ] I'm willing to contribute [dev time](https://github.com/ArchiveBox/ArchiveBox#archivebox-development) / [money](https://github.com/sponsors/pirate) to fix this issue - [ ] I like ArchiveBox so far / would recommend it to a friend - [ ] I've had a lot of difficulty getting ArchiveBox set up diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..66e331b2 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,32 @@ +name: "CodeQL" + +on: + push: + branches: [ dev ] + pull_request: + branches: [ dev ] + schedule: + - cron: '43 1 * * 2' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + queries: security-extended + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/debian.yml b/.github/workflows/debian.yml index 82a635d0..6492f020 100644 --- a/.github/workflows/debian.yml +++ b/.github/workflows/debian.yml @@ -19,7 +19,8 @@ jobs: - name: Install packaging dependencies run: | - sudo apt install -y \ + sudo apt-get update -qq + sudo apt-get install -y \ python3 python3-dev python3-pip python3-venv python3-all \ dh-python debhelper devscripts dput software-properties-common \ python3-distutils python3-setuptools python3-wheel python3-stdeb @@ -36,7 +37,7 @@ jobs: - name: Install archivebox from deb run: | cd deb_dist/ - sudo apt install ./archivebox*.deb + sudo apt-get install ./archivebox*.deb - name: Check ArchiveBox version run: | diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 1d8c14e7..277061d1 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,9 +1,7 @@ name: Build Docker image on: - push: - branches: - - master + on: workflow_dispatch release: types: - created @@ -16,12 +14,6 @@ jobs: buildx: runs-on: ubuntu-latest steps: - - name: Docker Login - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - name: Checkout uses: actions/checkout@v2 with: @@ -51,6 +43,23 @@ jobs: key: ${{ runner.os }}-buildx-${{ github.sha }} restore-keys: | ${{ runner.os }}-buildx- + + - name: Docker Login + uses: docker/login-action@v1 + if: github.event_name != 'pull_request' + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Collect Docker tags + id: docker_meta + uses: crazy-max/ghaction-docker-meta@v1 + with: + images: archivebox/archivebox,nikisweeting/archivebox + tag-sha: true + tag-semver: | + {{version}} + {{major}}.{{minor}} - name: Build and push id: docker_build @@ -59,15 +68,11 @@ jobs: context: ./ file: ./Dockerfile builder: ${{ steps.buildx.outputs.name }} - push: true - tags: | - ${{ secrets.DOCKER_USERNAME }}/archivebox:latest - ${{ secrets.DOCKER_USERNAME }}/archivebox:${{ github.sha }} - archivebox/archivebox:latest - archivebox/archivebox:${{ github.sha }} + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.docker_meta.outputs.tags }} cache-from: type=local,src=/tmp/.buildx-cache cache-to: type=local,dest=/tmp/.buildx-cache - platforms: linux/amd64,linux/arm64,linux/arm/v7 - + platforms: 
linux/amd64,linux/386,linux/arm64,linux/arm/v7 + - name: Image digest run: echo ${{ steps.docker_build.outputs.digest }} diff --git a/.github/workflows/pip.yml b/.github/workflows/pip.yml index 36153189..8d8e3f91 100644 --- a/.github/workflows/pip.yml +++ b/.github/workflows/pip.yml @@ -1,4 +1,4 @@ -name: Build pip package +name: Build Pip package on: workflow_dispatch: diff --git a/README.md b/README.md index 65e87230..19196b4f 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@
-ArchiveBox
-The open-source self-hosted web archive.
+ArchiveBox
+Open-source self-hosted web archiving.
▶️ Quickstart | Demo | @@ -17,96 +17,145 @@ - - +
+ +Language grade: Python +Language grade: JavaScript +Total alerts +
-ArchiveBox is a powerful self-hosted internet archiving solution written in Python 3. You feed it URLs of pages you want to archive, and it saves them to disk in a variety of formats depending on the configuration and the content it detects. +ArchiveBox is a powerful self-hosted internet archiving solution written in Python. You feed it URLs of pages you want to archive, and it saves them to disk in a variety of formats depending on setup and content within. -Your archive can be managed through the command line with commands like `archivebox add`, through the built-in Web UI `archivebox server`, or via the Python library API (beta). It can ingest bookmarks from a browser or service like Pocket/Pinboard, your entire browsing history, RSS feeds, or URLs one at a time. You can also schedule regular/realtime imports with `archivebox schedule`. +**🔢  Run ArchiveBox via [Docker Compose (recommended)](#Quickstart), Docker, Apt, Brew, or Pip ([see below](#Quickstart)).** + +```bash +apt/brew/pip3 install archivebox + +archivebox init # run this in an empty folder +archivebox add 'https://example.com' # start adding URLs to archive +curl https://example.com/rss.xml | archivebox add # or add via stdin +archivebox schedule --every=day https://example.com/rss.xml +``` + +For each URL added, ArchiveBox saves several types of HTML snapshot (wget, Chrome headless, singlefile), a PDF, a screenshot, a WARC archive, any git repositories, images, audio, video, subtitles, article text, [and more...](#output-formats). + +```bash +archivebox server --createsuperuser 0.0.0.0:8000 # use the interactive web UI +archivebox list 'https://example.com' # use the CLI commands (--help for more) +ls ./archive/*/index.json # or browse directly via the filesystem +``` + +You can then manage your snapshots via the [filesystem](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#disk-layout), [CLI](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage), [Web UI](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#UI-Usage), [SQLite DB](https://github.com/ArchiveBox/ArchiveBox/blob/dev/archivebox/core/models.py) (`./index.sqlite3`), [Python API](https://docs.archivebox.io/en/latest/modules.html) (alpha), [REST API](https://github.com/ArchiveBox/ArchiveBox/issues/496) (alpha), or [desktop app](https://github.com/ArchiveBox/electron-archivebox) (alpha). + +At the end of the day, the goal is to sleep soundly knowing that the part of the internet you care about will be automatically preserved in multiple, durable long-term formats that will be accessible for decades (or longer). + +
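+
+As a rough illustration of the SQLite option above (a sketch only — `core_snapshot` is the table name created by the current Django models and may change between versions):
+
+```bash
+# list the most recently added snapshots straight from the index database
+sqlite3 ./index.sqlite3 "SELECT timestamp, url, title FROM core_snapshot ORDER BY timestamp DESC LIMIT 5;"
+```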
+

+bookshelf graphic   logo   bookshelf graphic +

+
+
+#### ⚡️  CLI Usage
+
+```bash
+# archivebox [subcommand] [--args]
+archivebox --version
+archivebox help
+```
+
+- `archivebox init/version/status/config/manage` to administer your collection
+- `archivebox add/remove/update/list` to manage Snapshots in the archive
+- `archivebox schedule` to regularly pull in fresh URLs from [bookmarks/history/Pocket/Pinboard/RSS/etc.](#input-formats) (see the example below)
+- `archivebox oneshot` to archive single URLs without starting a whole collection
+- `archivebox shell` to open a REPL to use the [Python API](https://docs.archivebox.io/en/latest/modules.html) (alpha)
+
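+For example, the `schedule` and `oneshot` subcommands can be invoked like this (URLs reused from the examples elsewhere in this README — substitute your own):
+
+```bash
+# pull in new URLs from a feed once a day
+archivebox schedule --every=day 'https://getpocket.com/users/USERNAME/feed/all'
+
+# archive a single page (with only selected extractors) without creating a whole collection
+archivebox oneshot --extract=title,favicon,media 'https://www.youtube.com/watch?v=dQw4w9WgXcQ'
+```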
+
+Demo | Screenshots | Usage +
+. . . . . . . . . . . . . . . . . . . . . . . . . . . . +

+cli init screenshot +server snapshot admin screenshot +server snapshot details page screenshot +
+
+grassgrass +
-The main index is a self-contained `index.sqlite3` file, and each snapshot is stored as a folder `data/archive//`, with an easy-to-read `index.html` and `index.json` within. For each page, ArchiveBox auto-extracts many types of assets/media and saves them in standard formats, with out-of-the-box support for: several types of HTML snapshots (wget, Chrome headless, singlefile), PDF snapshotting, screenshotting, WARC archiving, git repositories, images, audio, video, subtitles, article text, and more. The snapshots are browseable and managable offline through the filesystem, the built-in webserver, or the Python library API. ### Quickstart -It works on Linux/BSD (Intel and ARM CPUs with `docker`/`apt`/`pip3`), macOS (with `docker`/`brew`/`pip3`), and Windows (beta with `docker`/`pip3`). +**🖥  Supported OSs:** Linux/BSD, macOS, Windows     **🎮  CPU Architectures:** x86, amd64, arm7, arm8 (raspi >=3) +**📦  Distributions:** `docker`/`apt`/`brew`/`pip3`/`npm` (in order of completeness) -```bash -pip3 install archivebox -archivebox --version -# install extras as-needed, or use one of full setup methods below to get everything out-of-the-box - -mkdir ~/archivebox && cd ~/archivebox # this can be anywhere -archivebox init - -archivebox add 'https://example.com' -archivebox add --depth=1 'https://example.com' -archivebox schedule --every=day https://getpocket.com/users/USERNAME/feed/all -archivebox oneshot --extract=title,favicon,media https://www.youtube.com/watch?v=dQw4w9WgXcQ -archivebox help # to see more options -``` - -*(click to expand the sections below for full setup instructions)* +*(click to expand your preferred **► `distribution`** below for full setup instructions)*
Get ArchiveBox with docker-compose on any platform (recommended, everything included out-of-the-box) -First make sure you have Docker installed: https://docs.docker.com/get-docker/ -

-This is the recommended way to run ArchiveBox because it includes *all* the extractors like chrome, wget, youtube-dl, git, etc., as well as full-text search with sonic, and many other great features. +First make sure you have Docker installed: https://docs.docker.com/get-docker/ -```bash +

 # create a new empty directory and initalize your collection (can be anywhere)
 mkdir ~/archivebox && cd ~/archivebox
-curl -O https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/master/docker-compose.yml
+curl -O 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/master/docker-compose.yml'
 docker-compose run archivebox init
 docker-compose run archivebox --version
 
 # start the webserver and open the UI (optional)
 docker-compose run archivebox manage createsuperuser
 docker-compose up -d
-open http://127.0.0.1:8000
+open 'http://127.0.0.1:8000'
 
 # you can also add links and manage your archive via the CLI:
 docker-compose run archivebox add 'https://example.com'
 docker-compose run archivebox status
 docker-compose run archivebox help  # to see more options
-```
+
+
+This is the recommended way to run ArchiveBox because it includes all the extractors out-of-the-box:
+chrome, wget, youtube-dl, git, etc., as well as full-text search w/ sonic and many other great features.
Get ArchiveBox with docker on any platform -First make sure you have Docker installed: https://docs.docker.com/get-docker/
-```bash +First make sure you have Docker installed: https://docs.docker.com/get-docker/ + +

 # create a new empty directory and initalize your collection (can be anywhere)
 mkdir ~/archivebox && cd ~/archivebox
 docker run -v $PWD:/data -it archivebox/archivebox init
 docker run -v $PWD:/data -it archivebox/archivebox --version
 
 # start the webserver and open the UI (optional)
-docker run -v $PWD:/data -it archivebox/archivebox manage createsuperuser
-docker run -v $PWD:/data -p 8000:8000 archivebox/archivebox server 0.0.0.0:8000
+docker run -v $PWD:/data -it -p 8000:8000 archivebox/archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add links and manage your archive via the CLI:
 docker run -v $PWD:/data -it archivebox/archivebox add 'https://example.com'
 docker run -v $PWD:/data -it archivebox/archivebox status
 docker run -v $PWD:/data -it archivebox/archivebox help  # to see more options
-```
+
Get ArchiveBox with apt on Ubuntu >=20.04 -```bash +First make sure you're on Ubuntu >= 20.04, or scroll down for older/non-Ubuntu instructions. + +

+# add the repo to your sources and install the archivebox package using apt
+sudo apt install software-properties-common
 sudo add-apt-repository -u ppa:archivebox/archivebox
 sudo apt install archivebox
 
@@ -117,8 +166,7 @@ archivebox init
 archivebox --version
 
 # start the webserver and open the web UI (optional)
-archivebox manage createsuperuser
-archivebox server 0.0.0.0:8000
+archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add URLs and manage the archive via the CLI and filesystem:
@@ -127,13 +175,17 @@ archivebox status
 archivebox list --html --with-headers > index.html
 archivebox list --json --with-headers > index.json
 archivebox help  # to see more options
-```
+
For other Debian-based systems or older Ubuntu systems you can add these sources to `/etc/apt/sources.list`: -```bash + +

 deb http://ppa.launchpad.net/archivebox/archivebox/ubuntu focal main
 deb-src http://ppa.launchpad.net/archivebox/archivebox/ubuntu focal main
-```
+
+ +Then run `apt update; apt install archivebox; archivebox --version`. + (you may need to install some other dependencies manually however)
@@ -141,7 +193,10 @@ deb-src http://ppa.launchpad.net/archivebox/archivebox/ubuntu focal main
Get ArchiveBox with brew on macOS >=10.13 -```bash +First make sure you have Homebrew installed: https://brew.sh/#install + +

+# install the archivebox package using homebrew
 brew install archivebox/archivebox/archivebox
 
 # create a new empty directory and initalize your collection (can be anywhere)
@@ -151,8 +206,7 @@ archivebox init
 archivebox --version
 
 # start the webserver and open the web UI (optional)
-archivebox manage createsuperuser
-archivebox server 0.0.0.0:8000
+archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add URLs and manage the archive via the CLI and filesystem:
@@ -161,14 +215,17 @@ archivebox status
 archivebox list --html --with-headers > index.html
 archivebox list --json --with-headers > index.json
 archivebox help  # to see more options
-```
+
Get ArchiveBox with pip on any platform -```bash +First make sure you have Python >= 3.7 installed: https://realpython.com/installing-python/ + +

+# install the archivebox package using pip3
 pip3 install archivebox
 
 # create a new empty directory and initalize your collection (can be anywhere)
@@ -179,8 +236,7 @@ archivebox --version
 # Install any missing extras like wget/git/chrome/etc. manually as needed
 
 # start the webserver and open the web UI (optional)
-archivebox manage createsuperuser
-archivebox server 0.0.0.0:8000
+archivebox server --createsuperuser 0.0.0.0:8000
 open http://127.0.0.1:8000
 
 # you can also add URLs and manage the archive via the CLI and filesystem:
@@ -189,56 +245,58 @@ archivebox status
 archivebox list --html --with-headers > index.html
 archivebox list --json --with-headers > index.json
 archivebox help  # to see more options
-```
+
- ---- - -
- -
-DEMO: archivebox.zervice.io/ -For more information, see the full Quickstart guide, Usage, and Configuration docs. +No matter which install method you choose, they all roughly follow this 3-step process and all provide the same CLI, Web UI, and on-disk data format. + + + +1. Install ArchiveBox: `apt/brew/pip3 install archivebox` +2. Start a collection: `archivebox init` +3. Start archiving: `archivebox add 'https://example.com'` + + + +
+
+grassgrass
- ---- - - -# Overview - -ArchiveBox is a command line tool, self-hostable web-archiving server, and Python library all-in-one. It can be installed on Docker, macOS, and Linux/BSD, and Windows. You can download and install it as a Debian/Ubuntu package, Homebrew package, Python3 package, or a Docker image. No matter which install method you choose, they all provide the same CLI, Web UI, and on-disk data format. - -To use ArchiveBox you start by creating a folder for your data to live in (it can be anywhere on your system), and running `archivebox init` inside of it. That will create a sqlite3 index and an `ArchiveBox.conf` file. After that, you can continue to add/export/manage/etc using the CLI `archivebox help`, or you can run the Web UI (recommended). If you only want to archive a single site, you can run `archivebox oneshot` to avoid having to create a whole collection. - -The CLI is considered "stable", the ArchiveBox Python API and REST APIs are "beta", and the [desktop app](https://github.com/ArchiveBox/desktop) is "alpha". - -At the end of the day, the goal is to sleep soundly knowing that the part of the internet you care about will be automatically preserved in multiple, durable long-term formats that will be accessible for decades (or longer). You can also self-host your archivebox server on a public domain to provide archive.org-style public access to your site snapshots. +
-CLI Screenshot -Desktop index screenshot -Desktop details page Screenshot -Desktop details page Screenshot
-Demo | Usage -
+ + + +

. . . . . . . . . . . . . . . . . . . . . . . . . . . . -

- +

+DEMO: https://archivebox.zervice.io
+Quickstart | Usage | Configuration +
+
## Key Features - [**Free & open source**](https://github.com/ArchiveBox/ArchiveBox/blob/master/LICENSE), doesn't require signing up for anything, stores all data locally -- [**Few dependencies**](https://github.com/ArchiveBox/ArchiveBox/wiki/Install#dependencies) and [simple command line interface](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage) +- [**Powerful, intuitive command line interface**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage) with [modular optional dependencies](#dependencies) - [**Comprehensive documentation**](https://github.com/ArchiveBox/ArchiveBox/wiki), [active development](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap), and [rich community](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community) -- Easy to set up **[scheduled importing](https://github.com/ArchiveBox/ArchiveBox/wiki/Scheduled-Archiving) from multiple sources** -- Uses common, **durable, [long-term formats](#saves-lots-of-useful-stuff-for-each-imported-link)** like HTML, JSON, PDF, PNG, and WARC -- ~~**Suitable for paywalled / [authenticated content](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#chrome_user_data_dir)** (can use your cookies)~~ (do not do this until v0.5 is released with some security fixes) -- **Doesn't require a constantly-running daemon**, proxy, or native app -- Provides a CLI, Python API, self-hosted web UI, and REST API (WIP) -- Architected to be able to run [**many varieties of scripts during archiving**](https://github.com/ArchiveBox/ArchiveBox/issues/51), e.g. to extract media, summarize articles, [scroll pages](https://github.com/ArchiveBox/ArchiveBox/issues/80), [close modals](https://github.com/ArchiveBox/ArchiveBox/issues/175), expand comment threads, etc. -- Can also [**mirror content to 3rd-party archiving services**](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#submit_archive_dot_org) automatically for redundancy +- [**Extracts a wide variety of content out-of-the-box**](https://github.com/ArchiveBox/ArchiveBox/issues/51): [media (youtube-dl), articles (readability), code (git), etc.](#output-formats) +- [**Supports scheduled/realtime importing**](https://github.com/ArchiveBox/ArchiveBox/wiki/Scheduled-Archiving) from [many types of sources](#input-formats) +- [**Uses standard, durable, long-term formats**](#saves-lots-of-useful-stuff-for-each-imported-link) like HTML, JSON, PDF, PNG, and WARC +- [**Usable as a oneshot CLI**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage), [**self-hosted web UI**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#UI-Usage), [Python API](https://docs.archivebox.io/en/latest/modules.html) (BETA), [REST API](https://github.com/ArchiveBox/ArchiveBox/issues/496) (ALPHA), or [desktop app](https://github.com/ArchiveBox/electron-archivebox) (ALPHA) +- [**Saves all pages to archive.org as well**](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#submit_archive_dot_org) by default for redundancy (can be [disabled](https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview#stealth-mode) for local-only mode) +- Planned: support for archiving [content requiring a login/paywall/cookies](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#chrome_user_data_dir) (working, but ill-advised until some pending fixes are released) +- Planned: support for running [JS scripts during archiving](https://github.com/ArchiveBox/ArchiveBox/issues/51), e.g. 
adblock, [autoscroll](https://github.com/ArchiveBox/ArchiveBox/issues/80), [modal-hiding](https://github.com/ArchiveBox/ArchiveBox/issues/175), [thread-expander](https://github.com/ArchiveBox/ArchiveBox/issues/345), etc. + +
+ +--- + +
+lego +
## Input formats @@ -253,9 +311,10 @@ archivebox add --depth=1 'https://example.com/some/downloads.html' archivebox add --depth=1 'https://news.ycombinator.com#2020-12-12' ``` -- Browser history or bookmarks exports (Chrome, Firefox, Safari, IE, Opera, and more) -- RSS, XML, JSON, CSV, SQL, HTML, Markdown, TXT, or any other text-based format -- Pocket, Pinboard, Instapaper, Shaarli, Delicious, Reddit Saved Posts, Wallabag, Unmark.it, OneTab, and more + +- TXT, RSS, XML, JSON, CSV, SQL, HTML, Markdown, or [any other text-based format...](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#Import-a-list-of-URLs-from-a-text-file) +- [Browser history](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive) or [browser bookmarks](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive) (see instructions for: [Chrome](https://support.google.com/chrome/answer/96816?hl=en), [Firefox](https://support.mozilla.org/en-US/kb/export-firefox-bookmarks-to-backup-or-transfer), [Safari](http://i.imgur.com/AtcvUZA.png), [IE](https://support.microsoft.com/en-us/help/211089/how-to-import-and-export-the-internet-explorer-favorites-folder-to-a-32-bit-version-of-windows), [Opera](http://help.opera.com/Windows/12.10/en/importexport.html), [and more...](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive)) +- [Pocket](https://getpocket.com/export), [Pinboard](https://pinboard.in/export/), [Instapaper](https://www.instapaper.com/user/export), [Shaarli](https://shaarli.readthedocs.io/en/master/Usage/#importexport), [Delicious](https://www.groovypost.com/howto/howto/export-delicious-bookmarks-xml/), [Reddit Saved](https://github.com/csu/export-saved-reddit), [Wallabag](https://doc.wallabag.org/en/user/import/wallabagv2.html), [Unmark.it](http://help.unmark.it/import-export), [OneTab](https://www.addictivetips.com/web/onetab-save-close-all-chrome-tabs-to-restore-export-or-import/), [and more...](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart#2-get-your-list-of-urls-to-archive) See the [Usage: CLI](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage) page for documentation and examples. 
@@ -272,34 +331,51 @@ The on-disk layout is optimized to be easy to browse by hand and durable long-te ``` - **Index:** `index.html` & `index.json` HTML and JSON index files containing metadata and details -- **Title:** `title` title of the site -- **Favicon:** `favicon.ico` favicon of the site -- **Headers:** `headers.json` Any HTTP headers the site returns are saved in a json file -- **SingleFile:** `singlefile.html` HTML snapshot rendered with headless Chrome using SingleFile -- **WGET Clone:** `example.com/page-name.html` wget clone of the site, with .html appended if not present -- **WARC:** `warc/.gz` gzipped WARC of all the resources fetched while archiving -- **PDF:** `output.pdf` Printed PDF of site using headless chrome -- **Screenshot:** `screenshot.png` 1440x900 screenshot of site using headless chrome -- **DOM Dump:** `output.html` DOM Dump of the HTML after rendering using headless chrome -- **Readability:** `article.html/json` Article text extraction using Readability -- **URL to Archive.org:** `archive.org.txt` A link to the saved site on archive.org +- **Title**, **Favicon**, **Headers** Response headers, site favicon, and parsed site title +- **Wget Clone:** `example.com/page-name.html` wget clone of the site with `warc/.gz` +- Chrome Headless + - **SingleFile:** `singlefile.html` HTML snapshot rendered with headless Chrome using SingleFile + - **PDF:** `output.pdf` Printed PDF of site using headless chrome + - **Screenshot:** `screenshot.png` 1440x900 screenshot of site using headless chrome + - **DOM Dump:** `output.html` DOM Dump of the HTML after rendering using headless chrome + - **Readability:** `article.html/json` Article text extraction using Readability +- **Archive.org Permalink:** `archive.org.txt` A link to the saved site on archive.org - **Audio & Video:** `media/` all audio/video files + playlists, including subtitles & metadata with youtube-dl - **Source Code:** `git/` clone of any repository found on github, bitbucket, or gitlab links - _More coming soon! See the [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap)..._ It does everything out-of-the-box by default, but you can disable or tweak [individual archive methods](https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration) via environment variables or config file. +
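+
+For example, a minimal sketch of toggling a couple of the options mentioned in this README (the same keys can also be set as environment variables):
+
+```bash
+archivebox config --set SAVE_MEDIA=False        # skip downloading audio & video with youtube-dl
+archivebox config --set CHROME_BINARY=chromium  # point the Chrome-based extractors at a specific binary
+```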
+lego graphic +
+ +
+ +--- + +
+ ## Dependencies You don't need to install all the dependencies, ArchiveBox will automatically enable the relevant modules based on whatever you have available, but it's recommended to use the official [Docker image](https://github.com/ArchiveBox/ArchiveBox/wiki/Docker) with everything preinstalled. -If you so choose, you can also install ArchiveBox and its dependencies directly on any Linux or macOS systems using the [automated setup script](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart) or the [system package manager](https://github.com/ArchiveBox/ArchiveBox/wiki/Install). +If you so choose, you can also install ArchiveBox and its dependencies directly on any Linux or macOS systems using the [system package manager](https://github.com/ArchiveBox/ArchiveBox/wiki/Install) or by running the [automated setup script](https://github.com/ArchiveBox/ArchiveBox/wiki/Quickstart). ArchiveBox is written in Python 3 so it requires `python3` and `pip3` available on your system. It also uses a set of optional, but highly recommended external dependencies for archiving sites: `wget` (for plain HTML, static files, and WARC saving), `chromium` (for screenshots, PDFs, JS execution, and more), `youtube-dl` (for audio and video), `git` (for cloning git repos), and `nodejs` (for readability and singlefile), and more. +
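+
+A quick way to check which of these optional dependencies ArchiveBox has actually detected (the version output lists each dependency's path, version, and enabled/valid status):
+
+```bash
+archivebox --version
+```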
+ +--- + +
+security graphic +
+ ## Caveats If you're importing URLs containing secret slugs or pages with private content (e.g Google Docs, CodiMD notepads, etc), you may want to disable some of the extractor modules to avoid leaking private URLs to 3rd party APIs during the archiving process. + ```bash # don't do this: archivebox add 'https://docs.google.com/document/d/12345somelongsecrethere' @@ -312,6 +388,7 @@ archivebox config --set CHROME_BINARY=chromium # optional: switch to chromium t ``` Be aware that malicious archived JS can also read the contents of other pages in your archive due to snapshot CSRF and XSS protections being imperfect. See the [Security Overview](https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview#stealth-mode) page for more details. + ```bash # visiting an archived page with malicious JS: https://127.0.0.1:8000/archive/1602401954/example.com/index.html @@ -323,20 +400,67 @@ https://127.0.0.1:8000/archive/* ``` Support for saving multiple snapshots of each site over time will be [added soon](https://github.com/ArchiveBox/ArchiveBox/issues/179) (along with the ability to view diffs of the changes between runs). For now ArchiveBox is designed to only archive each URL with each extractor type once. A workaround to take multiple snapshots of the same URL is to make them slightly different by adding a hash: + ```bash archivebox add 'https://example.com#2020-10-24' ... archivebox add 'https://example.com#2020-10-25' ``` +
+ --- +
+ +## Screenshots + +
+brew install archivebox  |  archivebox version
+archivebox init
+archivebox add  |  archivebox data dir
+archivebox server  |  archivebox server add  |  archivebox server list  |  archivebox server detail
+ +--- + +
+
- +paisley graphic
---- - # Background & Motivation Vast treasure troves of knowledge are lost every day on the internet to link rot. As a society, we have an imperative to preserve some important parts of that treasure, just like we preserve our books, paintings, and music in physical libraries long after the originals go out of print or fade into obscurity. @@ -376,6 +500,11 @@ Unlike crawler software that starts from a seed URL and works outwards, or publi Because ArchiveBox is designed to ingest a firehose of browser history and bookmark feeds to a local disk, it can be much more disk-space intensive than a centralized service like the Internet Archive or Archive.today. However, as storage space gets cheaper and compression improves, you should be able to use it continuously over the years without having to delete anything. In my experience, ArchiveBox uses about 5gb per 1000 articles, but your milage may vary depending on which options you have enabled and what types of sites you're archiving. By default, it archives everything in as many formats as possible, meaning it takes more space than a using a single method, but more content is accurately replayable over extended periods of time. Storage requirements can be reduced by using a compressed/deduplicated filesystem like ZFS/BTRFS, or by setting `SAVE_MEDIA=False` to skip audio & video files. +
+
+dependencies graphic +
+ ## Learn more Whether you want to learn which organizations are the big players in the web archiving space, want to find a specific open-source tool for your web archiving need, or just want to see where archivists hang out online, our Community Wiki page serves as an index of the broader web archiving community. Check it out to learn about some of the coolest web archiving projects and communities on the web! @@ -383,20 +512,26 @@ Whether you want to learn which organizations are the big players in the web arc - [Community Wiki](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community) - - [The Master Lists](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#The-Master-Lists) + - [The Master Lists](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#the-master-lists) _Community-maintained indexes of archiving tools and institutions._ - - [Web Archiving Software](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#Web-Archiving-Projects) + - [Web Archiving Software](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#web-archiving-projects) _Open source tools and projects in the internet archiving space._ - - [Reading List](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#Reading-List) + - [Reading List](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#reading-list) _Articles, posts, and blogs relevant to ArchiveBox and web archiving in general._ - - [Communities](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#Communities) + - [Communities](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community#communities) _A collection of the most active internet archiving communities and initiatives._ - Check out the ArchiveBox [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap) and [Changelog](https://github.com/ArchiveBox/ArchiveBox/wiki/Changelog) - Learn why archiving the internet is important by reading the "[On the Importance of Web Archiving](https://parameters.ssrc.org/2018/09/on-the-importance-of-web-archiving/)" blog post. - Or reach out to me for questions and comments via [@ArchiveBoxApp](https://twitter.com/ArchiveBoxApp) or [@theSquashSH](https://twitter.com/thesquashSH) on Twitter. +
+ --- +
+documentation graphic +
+ # Documentation @@ -422,8 +557,8 @@ You can also access the docs locally by looking in the [`ArchiveBox/docs/`](http - [Chromium Install](https://github.com/ArchiveBox/ArchiveBox/wiki/Chromium-Install) - [Security Overview](https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview) - [Troubleshooting](https://github.com/ArchiveBox/ArchiveBox/wiki/Troubleshooting) -- [Python API](https://docs.archivebox.io/en/latest/modules.html) -- REST API (coming soon...) +- [Python API](https://docs.archivebox.io/en/latest/modules.html) (alpha) +- [REST API](https://github.com/ArchiveBox/ArchiveBox/issues/496) (alpha) ## More Info @@ -434,37 +569,58 @@ You can also access the docs locally by looking in the [`ArchiveBox/docs/`](http - [Background & Motivation](https://github.com/ArchiveBox/ArchiveBox#background--motivation) - [Web Archiving Community](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community) +
+ --- +
+development +
+ # ArchiveBox Development All contributions to ArchiveBox are welcomed! Check our [issues](https://github.com/ArchiveBox/ArchiveBox/issues) and [Roadmap](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap) for things to work on, and please open an issue to discuss your proposed implementation before working on things! Otherwise we may have to close your PR if it doesn't align with our roadmap. +Low hanging fruit / easy first tickets:
+Total alerts + ### Setup the dev environment -First, install the system dependencies from the "Bare Metal" section above. -Then you can clone the ArchiveBox repo and install -```python3 -git clone https://github.com/ArchiveBox/ArchiveBox && cd ArchiveBox -git checkout master # or the branch you want to test +#### 1. Clone the main code repo (making sure to pull the submodules as well) + +```bash +git clone --recurse-submodules https://github.com/ArchiveBox/ArchiveBox +cd ArchiveBox +git checkout dev # or the branch you want to test git submodule update --init --recursive git pull --recurse-submodules +``` +#### 2. Option A: Install the Python, JS, and system dependencies directly on your machine + +```bash # Install ArchiveBox + python dependencies -python3 -m venv .venv && source .venv/bin/activate && pip install -e .[dev] -# or with pipenv: pipenv install --dev && pipenv shell +python3 -m venv .venv && source .venv/bin/activate && pip install -e '.[dev]' +# or: pipenv install --dev && pipenv shell # Install node dependencies npm install -# Optional: install extractor dependencies manually or with helper script +# Check to see if anything is missing +archivebox --version +# install any missing dependencies manually, or use the helper script: ./bin/setup.sh +``` +#### 2. Option B: Build the docker container and use that for development instead + +```bash # Optional: develop via docker by mounting the code dir into the container # if you edit e.g. ./archivebox/core/models.py on the docker host, runserver # inside the container will reload and pick up your changes docker build . -t archivebox -docker run -it -p 8000:8000 \ +docker run -it --rm archivebox version +docker run -it --rm -p 8000:8000 \ -v $PWD/data:/data \ -v $PWD/archivebox:/app/archivebox \ archivebox server 0.0.0.0:8000 --debug --reload @@ -475,6 +631,21 @@ docker run -it -p 8000:8000 \ See the `./bin/` folder and read the source of the bash scripts within. You can also run all these in Docker. For more examples see the Github Actions CI/CD tests that are run: `.github/workflows/*.yaml`. +#### Run in DEBUG mode + +```bash +archivebox config --set DEBUG=True +# or +archivebox server --debug ... +``` + +### Build and run a Github branch + +```bash +docker build -t archivebox:dev https://github.com/ArchiveBox/ArchiveBox.git#dev +docker run -it -v $PWD:/data archivebox:dev ... +``` + #### Run the linters ```bash @@ -491,17 +662,19 @@ You can also run all these in Docker. For more examples see the Github Actions C #### Make migrations or enter a django shell +Make sure to run this whenever you change things in `models.py`. 
```bash cd archivebox/ ./manage.py makemigrations -cd data/ +cd path/to/test/data/ archivebox shell ``` (uses `pytest -s`) #### Build the docs, pip package, and docker image +(Normally CI takes care of this, but these scripts can be run to do it manually) ```bash ./bin/build.sh @@ -515,11 +688,17 @@ archivebox shell #### Roll a release +(Normally CI takes care of this, but these scripts can be run to do it manually) ```bash ./bin/release.sh -``` -(bumps the version, builds, and pushes a release to PyPI, Docker Hub, and Github Packages) +# or individually: +./bin/release_docs.sh +./bin/release_pip.sh +./bin/release_deb.sh +./bin/release_brew.sh +./bin/release_docker.sh +``` --- diff --git a/archivebox/cli/archivebox_schedule.py b/archivebox/cli/archivebox_schedule.py index ec5e9146..568b25b9 100644 --- a/archivebox/cli/archivebox_schedule.py +++ b/archivebox/cli/archivebox_schedule.py @@ -42,6 +42,7 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional parser.add_argument( '--depth', # '-d', type=int, + choices=[0, 1], default=0, help='Depth to archive to [0] or 1, see "add" command help for more info.', ) diff --git a/archivebox/cli/archivebox_server.py b/archivebox/cli/archivebox_server.py index dbacf7e5..a4d96dc9 100644 --- a/archivebox/cli/archivebox_server.py +++ b/archivebox/cli/archivebox_server.py @@ -43,6 +43,11 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional action='store_true', help='Run archivebox init before starting the server', ) + parser.add_argument( + '--createsuperuser', + action='store_true', + help='Run archivebox manage createsuperuser before starting the server', + ) command = parser.parse_args(args or ()) reject_stdin(__command__, stdin) @@ -51,6 +56,7 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional reload=command.reload, debug=command.debug, init=command.init, + createsuperuser=command.createsuperuser, out_dir=pwd or OUTPUT_DIR, ) diff --git a/archivebox/config.py b/archivebox/config.py index 9a3f9a77..dc014ed5 100644 --- a/archivebox/config.py +++ b/archivebox/config.py @@ -116,16 +116,15 @@ CONFIG_SCHEMA: Dict[str, ConfigDefaultDict] = { '--write-annotations', '--write-thumbnail', '--no-call-home', - '--user-agent', '--all-subs', - '--extract-audio', - '--keep-video', + '--yes-playlist', + '--continue', '--ignore-errors', '--geo-bypass', - '--audio-format', 'mp3', - '--audio-quality', '320K', - '--embed-thumbnail', - '--add-metadata']}, + '--add-metadata', + '--max-filesize=750m', + ]}, + 'WGET_ARGS': {'type': list, 'default': ['--no-verbose', '--adjust-extension', @@ -775,7 +774,7 @@ def get_dependency_info(config: ConfigDict) -> ConfigValue: 'version': config['PYTHON_VERSION'], 'hash': bin_hash(config['PYTHON_BINARY']), 'enabled': True, - 'is_valid': bool(config['DJANGO_VERSION']), + 'is_valid': bool(config['PYTHON_VERSION']), }, 'DJANGO_BINARY': { 'path': bin_path(config['DJANGO_BINARY']), @@ -787,7 +786,7 @@ def get_dependency_info(config: ConfigDict) -> ConfigValue: 'CURL_BINARY': { 'path': bin_path(config['CURL_BINARY']), 'version': config['CURL_VERSION'], - 'hash': bin_hash(config['PYTHON_BINARY']), + 'hash': bin_hash(config['CURL_BINARY']), 'enabled': config['USE_CURL'], 'is_valid': bool(config['CURL_VERSION']), }, @@ -803,7 +802,7 @@ def get_dependency_info(config: ConfigDict) -> ConfigValue: 'version': config['NODE_VERSION'], 'hash': bin_hash(config['NODE_BINARY']), 'enabled': config['USE_NODE'], - 'is_valid': bool(config['SINGLEFILE_VERSION']), + 
'is_valid': bool(config['NODE_VERSION']), }, 'SINGLEFILE_BINARY': { 'path': bin_path(config['SINGLEFILE_BINARY']), diff --git a/archivebox/core/admin.py b/archivebox/core/admin.py index 4eda8b59..899b6059 100644 --- a/archivebox/core/admin.py +++ b/archivebox/core/admin.py @@ -11,13 +11,14 @@ from django.shortcuts import render, redirect from django.contrib.auth import get_user_model from django import forms +from ..util import htmldecode, urldecode, ansi_to_html + from core.models import Snapshot, Tag from core.forms import AddLinkForm, TagField from core.mixins import SearchResultsAdminMixin from index.html import snapshot_icons -from util import htmldecode, urldecode, ansi_to_html from logging_util import printable_filesize from main import add, remove from config import OUTPUT_DIR diff --git a/archivebox/core/forms.py b/archivebox/core/forms.py index 86b29bb7..ed584c68 100644 --- a/archivebox/core/forms.py +++ b/archivebox/core/forms.py @@ -22,10 +22,32 @@ class AddLinkForm(forms.Form): url = forms.RegexField(label="URLs (one per line)", regex=URL_REGEX, min_length='6', strip=True, widget=forms.Textarea, required=True) depth = forms.ChoiceField(label="Archive depth", choices=CHOICES, widget=forms.RadioSelect, initial='0') archive_methods = forms.MultipleChoiceField( + label="Archive methods (select at least 1, otherwise all will be used by default)", required=False, widget=forms.SelectMultiple, choices=ARCHIVE_METHODS, ) + # TODO: hook these up to the view and put them + # in a collapsible UI section labeled "Advanced" + # + # exclude_patterns = forms.CharField( + # label="Exclude patterns", + # min_length='1', + # required=False, + # initial=URL_BLACKLIST, + # ) + # timeout = forms.IntegerField( + # initial=TIMEOUT, + # ) + # overwrite = forms.BooleanField( + # label="Overwrite any existing Snapshots", + # initial=False, + # ) + # index_only = forms.BooleanField( + # label="Add URLs to index without Snapshotting", + # initial=False, + # ) + class TagWidgetMixin: def format_value(self, value): if value is not None and not isinstance(value, str): diff --git a/archivebox/core/migrations/0007_archiveresult.py b/archivebox/core/migrations/0007_archiveresult.py index a780376f..ec48d3ff 100644 --- a/archivebox/core/migrations/0007_archiveresult.py +++ b/archivebox/core/migrations/0007_archiveresult.py @@ -36,7 +36,7 @@ def forwards_func(apps, schema_editor): for extractor in history: for result in history[extractor]: - ArchiveResult.objects.create(extractor=extractor, snapshot=snapshot, cmd=result["cmd"], cmd_version=result["cmd_version"], + ArchiveResult.objects.create(extractor=extractor, snapshot=snapshot, cmd=result["cmd"], cmd_version=result["cmd_version"] or 'unknown', start_ts=result["start_ts"], end_ts=result["end_ts"], status=result["status"], pwd=result["pwd"], output=result["output"]) diff --git a/archivebox/core/settings.py b/archivebox/core/settings.py index e8ed6b16..bfc0cdc3 100644 --- a/archivebox/core/settings.py +++ b/archivebox/core/settings.py @@ -101,7 +101,7 @@ TEMPLATES = [ ################################################################################ DATABASE_FILE = Path(OUTPUT_DIR) / SQL_INDEX_FILENAME -DATABASE_NAME = os.environ.get("ARCHIVEBOX_DATABASE_NAME", DATABASE_FILE) +DATABASE_NAME = os.environ.get("ARCHIVEBOX_DATABASE_NAME", str(DATABASE_FILE)) DATABASES = { 'default': { diff --git a/archivebox/core/views.py b/archivebox/core/views.py index 411cce29..9592f40d 100644 --- a/archivebox/core/views.py +++ b/archivebox/core/views.py @@ -9,6 +9,7 @@ from 
django.http import HttpResponse from django.views import View, static from django.views.generic.list import ListView from django.views.generic import FormView +from django.db.models import Q from django.contrib.auth.mixins import UserPassesTestMixin from core.models import Snapshot @@ -108,7 +109,7 @@ class PublicArchiveView(ListView): qs = super().get_queryset(**kwargs) query = self.request.GET.get('q') if query: - qs = qs.filter(title__icontains=query) + qs = qs.filter(Q(title__icontains=query) | Q(url__icontains=query) | Q(timestamp__icontains=query) | Q(tags__name__icontains=query)) for snapshot in qs: snapshot.icons = snapshot_icons(snapshot) return qs diff --git a/archivebox/extractors/__init__.py b/archivebox/extractors/__init__.py index f80e686b..7b87050c 100644 --- a/archivebox/extractors/__init__.py +++ b/archivebox/extractors/__init__.py @@ -96,7 +96,7 @@ def archive_snapshot(snapshot: Model, overwrite: bool=False, methods: Optional[I if method_name not in details["history"]: details["history"][method_name] = [] - if should_run(snapshot, out_dir) or overwrite: + if should_run(snapshot, out_dir, overwrite): log_archive_method_started(method_name) result = method_function(snapshot=snapshot, out_dir=out_dir) diff --git a/archivebox/extractors/archive_org.py b/archivebox/extractors/archive_org.py index d1e15c11..1e7ededb 100644 --- a/archivebox/extractors/archive_org.py +++ b/archivebox/extractors/archive_org.py @@ -25,14 +25,17 @@ from ..config import ( from ..logging_util import TimedProgress +# output = '{domain}/' + @enforce_types -def should_save_archive_dot_org(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_archive_dot_org(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: out_dir = out_dir or Path(snapshot.snapshot_dir) if is_static_file(snapshot.url): return False - if (out_dir / "archive.org.txt").exists(): + out_dir = out_dir or Path(link.link_dir) + if not overwrite and (out_dir / 'archive.org.txt').exists(): # if open(path, 'r').read().strip() != 'None': return False diff --git a/archivebox/extractors/dom.py b/archivebox/extractors/dom.py index 61038e76..4803ad0b 100644 --- a/archivebox/extractors/dom.py +++ b/archivebox/extractors/dom.py @@ -20,18 +20,21 @@ from ..config import ( from ..logging_util import TimedProgress +# output = 'output.html' + @enforce_types -def should_save_dom(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_dom(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: out_dir = out_dir or Path(snapshot.snapshot_dir) if is_static_file(snapshot.url): return False - - if (out_dir / 'output.html').exists(): + + out_dir = out_dir or Path(link.link_dir) + if not overwrite and (out_dir / 'output.html').exists(): return False return SAVE_DOM - + @enforce_types def save_dom(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOUT) -> ArchiveResult: """print HTML of site to file using chrome --dump-html""" diff --git a/archivebox/extractors/favicon.py b/archivebox/extractors/favicon.py index b678776b..3023382f 100644 --- a/archivebox/extractors/favicon.py +++ b/archivebox/extractors/favicon.py @@ -21,14 +21,17 @@ from ..config import ( from ..logging_util import TimedProgress +# output = 'favicon.ico' + + @enforce_types -def should_save_favicon(snapshot: Model, out_dir: Optional[str]=None) -> bool: +def should_save_favicon(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool: out_dir = out_dir 
or snapshot.snapshot_dir - if (Path(out_dir) / 'favicon.ico').exists(): + if not overwrite and (Path(out_dir) / 'favicon.ico').exists(): return False return SAVE_FAVICON - + @enforce_types def save_favicon(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOUT) -> ArchiveResult: """download site favicon from google's favicon api""" diff --git a/archivebox/extractors/git.py b/archivebox/extractors/git.py index 6674cab8..66e1d0d9 100644 --- a/archivebox/extractors/git.py +++ b/archivebox/extractors/git.py @@ -28,14 +28,20 @@ from ..config import ( from ..logging_util import TimedProgress +# output = 'git/' +# @contents = output.glob('*.*') +# @exists = self.contents.exists() +# @size => get_size(self.contents) +# @num_files => len(self.contents) @enforce_types -def should_save_git(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_git(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: out_dir = out_dir or snapshot.snapshot_dir if is_static_file(snapshot.url): return False - if (out_dir / "git").exists(): + out_dir = out_dir or Path(link.link_dir) + if not overwrite and (out_dir / 'git').exists(): return False is_clonable_url = ( diff --git a/archivebox/extractors/headers.py b/archivebox/extractors/headers.py index 7104b499..4ff59c15 100644 --- a/archivebox/extractors/headers.py +++ b/archivebox/extractors/headers.py @@ -23,12 +23,21 @@ from ..config import ( ) from ..logging_util import TimedProgress + +# output = 'headers.json' + @enforce_types -def should_save_headers(snapshot: Model, out_dir: Optional[str]=None) -> bool: +def should_save_headers(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool: out_dir = out_dir or snapshot.snapshot_dir + if not SAVE_HEADERS: + return False + + if overwrite: + return True + output = Path(out_dir or snapshot.snapshot_dir) / 'headers.json' - return not output.exists() and SAVE_HEADERS + return not output.exists() @enforce_types diff --git a/archivebox/extractors/media.py b/archivebox/extractors/media.py index a865e572..0ffdb627 100644 --- a/archivebox/extractors/media.py +++ b/archivebox/extractors/media.py @@ -22,14 +22,17 @@ from ..config import ( from ..logging_util import TimedProgress +# output = 'media/' + @enforce_types -def should_save_media(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_media(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: out_dir = out_dir or snapshot.snapshot_dir if is_static_file(snapshot.url): return False - if (out_dir / "media").exists(): + out_dir = out_dir or Path(link.link_dir) + if not overwrite and (out_dir / 'media').exists(): return False return SAVE_MEDIA diff --git a/archivebox/extractors/mercury.py b/archivebox/extractors/mercury.py index 135007ab..99ab92c4 100644 --- a/archivebox/extractors/mercury.py +++ b/archivebox/extractors/mercury.py @@ -39,13 +39,16 @@ def ShellError(cmd: List[str], result: CompletedProcess, lines: int=20) -> Archi @enforce_types -def should_save_mercury(snapshot: Model, out_dir: Optional[str]=None) -> bool: +def should_save_mercury(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool: out_dir = out_dir or snapshot.snapshot_dir if is_static_file(snapshot.url): return False output = Path(out_dir or snapshot.snapshot_dir) / 'mercury' - return SAVE_MERCURY and MERCURY_VERSION and (not output.exists()) + if not overwrite and output.exists(): + return False + + return SAVE_MERCURY and 
MERCURY_VERSION @enforce_types diff --git a/archivebox/extractors/pdf.py b/archivebox/extractors/pdf.py index 196c0089..7af910f2 100644 --- a/archivebox/extractors/pdf.py +++ b/archivebox/extractors/pdf.py @@ -19,14 +19,16 @@ from ..config import ( ) from ..logging_util import TimedProgress +# output = 'output.pdf' @enforce_types -def should_save_pdf(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_pdf(snapshot: Model, verwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: out_dir = out_dir or Path(snapshot.snapshot_dir) if is_static_file(snapshot.url): return False - - if (out_dir / "output.pdf").exists(): + + out_dir = out_dir or Path(link.link_dir) + if not overwrite and (out_dir / 'output.pdf').exists(): return False return SAVE_PDF diff --git a/archivebox/extractors/readability.py b/archivebox/extractors/readability.py index 655e9499..823a52ec 100644 --- a/archivebox/extractors/readability.py +++ b/archivebox/extractors/readability.py @@ -25,6 +25,7 @@ from ..config import ( ) from ..logging_util import TimedProgress + @enforce_types def get_html(snapshot: Model, path: Path) -> str: """ @@ -47,14 +48,20 @@ def get_html(snapshot: Model, path: Path) -> str: else: return document + +# output = 'readability/' + @enforce_types -def should_save_readability(snapshot: Model, out_dir: Optional[str]=None) -> bool: +def should_save_readability(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool: out_dir = out_dir or snapshot.link_dir if is_static_file(snapshot.url): return False output = Path(out_dir or snapshot.snapshot_dir) / 'readability' - return SAVE_READABILITY and READABILITY_VERSION and (not output.exists()) + if not overwrite and output.exists(): + return False + + return SAVE_READABILITY and READABILITY_VERSION @enforce_types diff --git a/archivebox/extractors/screenshot.py b/archivebox/extractors/screenshot.py index 723feb19..5747c289 100644 --- a/archivebox/extractors/screenshot.py +++ b/archivebox/extractors/screenshot.py @@ -20,14 +20,16 @@ from ..config import ( from ..logging_util import TimedProgress +# output = 'screenshot.png' @enforce_types -def should_save_screenshot(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_screenshot(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: out_dir = out_dir or Path(snapshot.snapshot_dir) if is_static_file(snapshot.url): return False - - if (out_dir / "screenshot.png").exists(): + + out_dir = out_dir or Path(link.link_dir) + if not overwrite and (out_dir / 'screenshot.png').exists(): return False return SAVE_SCREENSHOT diff --git a/archivebox/extractors/singlefile.py b/archivebox/extractors/singlefile.py index 8dd91936..df76ab5e 100644 --- a/archivebox/extractors/singlefile.py +++ b/archivebox/extractors/singlefile.py @@ -25,13 +25,16 @@ from ..logging_util import TimedProgress @enforce_types -def should_save_singlefile(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_singlefile(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: out_dir = out_dir or Path(snapshot.snapshot_dir) if is_static_file(snapshot.url): return False - output = out_dir / 'singlefile.html' - return SAVE_SINGLEFILE and SINGLEFILE_VERSION and (not output.exists()) + out_dir = out_dir or Path(link.link_dir) + if not overwrite and (out_dir / 'singlefile.html').exists(): + return False + + return SAVE_SINGLEFILE @enforce_types diff --git a/archivebox/extractors/title.py 
b/archivebox/extractors/title.py index 519c5961..fa082b43 100644 --- a/archivebox/extractors/title.py +++ b/archivebox/extractors/title.py @@ -62,13 +62,15 @@ class TitleParser(HTMLParser): self.inside_title_tag = False -@enforce_types -def should_save_title(snapshot: Model, out_dir: Optional[str]=None) -> bool: - # if link already has valid title, skip it - if snapshot.title and not snapshot.title.lower().startswith('http'): - return False +# output = '{title}' +@enforce_types +def should_save_title(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[str]=None) -> bool: if is_static_file(snapshot.url): + False + + # if snapshot already has valid title, skip it + if not overwrite and snapshot.title and not snapshot.title.lower().startswith('http'): return False return SAVE_TITLE diff --git a/archivebox/extractors/wget.py b/archivebox/extractors/wget.py index 0f011064..6e83c12f 100644 --- a/archivebox/extractors/wget.py +++ b/archivebox/extractors/wget.py @@ -38,10 +38,10 @@ from ..logging_util import TimedProgress @enforce_types -def should_save_wget(snapshot: Model, out_dir: Optional[Path]=None) -> bool: +def should_save_wget(snapshot: Model, overwrite: Optional[bool]=False, out_dir: Optional[Path]=None) -> bool: output_path = wget_output_path(snapshot) out_dir = out_dir or Path(snapshot.snapshot_dir) - if output_path and (out_dir / output_path).exists(): + if not overwrite output_path and (out_dir / output_path).exists(): return False return SAVE_WGET @@ -68,7 +68,7 @@ def save_wget(snapshot: Model, out_dir: Optional[Path]=None, timeout: int=TIMEOU *(['--warc-file={}'.format(str(warc_path))] if SAVE_WARC else []), *(['--page-requisites'] if SAVE_WGET_REQUISITES else []), *(['--user-agent={}'.format(WGET_USER_AGENT)] if WGET_USER_AGENT else []), - *(['--load-cookies', COOKIES_FILE] if COOKIES_FILE else []), + *(['--load-cookies', str(COOKIES_FILE)] if COOKIES_FILE else []), *(['--compression=auto'] if WGET_AUTO_COMPRESSION else []), *([] if SAVE_WARC else ['--timestamping']), *([] if CHECK_SSL_VALIDITY else ['--no-check-certificate', '--no-hsts']), @@ -177,11 +177,22 @@ def wget_output_path(snapshot: Model) -> Optional[str]: if html_files: return str(html_files[0].relative_to(snapshot.snapshot_dir)) + # sometimes wget'd URLs have no ext and return non-html + # e.g. 
@@ -177,11 +177,22 @@ def wget_output_path(snapshot: Model) -> Optional[str]:
         if html_files:
             return str(html_files[0].relative_to(snapshot.snapshot_dir))
 
+        # sometimes wget'd URLs have no ext and return non-html
+        # e.g. /some/example/rss/all -> some RSS XML content)
+        #      /some/other/url.o4g -> some binary unrecognized ext)
+        #      test this with archivebox add --depth=1 https://getpocket.com/users/nikisweeting/feed/all
+        last_part_of_url = urldecode(full_path.rsplit('/', 1)[-1])
+        for file_present in search_dir.iterdir():
+            if file_present.name == last_part_of_url:
+                return str(search_dir / file_present)
+
         # Move up one directory level
         search_dir = search_dir.parent
 
         if search_dir == snapshot.snapshot_dir:
             break
 
+
+    search_dir = Path(snapshot.snapshot_dir) / domain(snapshot.url).replace(":", "+") / urldecode(full_path)
     if not search_dir.is_dir():
diff --git a/archivebox/index/html.py b/archivebox/index/html.py
index a2a7cb35..22fad5c8 100644
--- a/archivebox/index/html.py
+++ b/archivebox/index/html.py
@@ -4,8 +4,8 @@ from datetime import datetime
 from typing import List, Optional, Iterator, Mapping
 from pathlib import Path
 
-from django.utils.html import format_html
 from django.db.models import Model
+from django.utils.html import format_html, mark_safe
 
 from collections import defaultdict
 from .schema import Link
@@ -119,7 +119,7 @@ path = snapshot.archive_path
     canon = snapshot.canonical_outputs()
     output = ""
-    output_template = '{} '
+    output_template = '{}  '
     icons = {
         "singlefile": "❶",
         "wget": "🆆",
@@ -145,12 +145,12 @@ for extractor, _ in EXTRACTORS:
         if extractor not in exclude:
             exists = extractor_items[extractor] is not None
-            output += output_template.format(path, canon[f"{extractor}_path"], str(exists),
+            output += format_html(output_template, path, canon[f"{extractor}_path"], str(exists),
                                              extractor, icons.get(extractor, "?"))
         if extractor == "wget":
             # warc isn't technically it's own extractor, so we have to add it after wget
             exists = list((Path(path) / canon["warc_path"]).glob("*.warc.gz"))
-            output += output_template.format(exists[0] if exists else '#', canon["warc_path"], str(bool(exists)), "warc", icons.get("warc", "?"))
+            output += format_html(output_template, exists[0] if exists else '#', canon["warc_path"], str(bool(exists)), "warc", icons.get("warc", "?"))
 
         if extractor == "archive_org":
             # The check for archive_org is different, so it has to be handled separately
@@ -159,4 +159,4 @@ output += '{} '.format(canon["archive_org_path"], str(exists), "archive_org", icons.get("archive_org", "?"))
 
-    return format_html(f'{output}')
+    return format_html('{}', mark_safe(output))
diff --git a/archivebox/main.py b/archivebox/main.py
index bfe59640..e266adf9 100644
--- a/archivebox/main.py
+++ b/archivebox/main.py
@@ -1070,6 +1070,7 @@ def server(runserver_args: Optional[List[str]]=None,
            reload: bool=False,
            debug: bool=False,
            init: bool=False,
+           createsuperuser: bool=False,
            out_dir: Path=OUTPUT_DIR) -> None:
     """Run the ArchiveBox HTTP server"""
 
@@ -1078,6 +1079,9 @@ if init:
         run_subcommand('init', stdin=None, pwd=out_dir)
 
+    if createsuperuser:
+        run_subcommand('manage', subcommand_args=['createsuperuser'], pwd=out_dir)
+
     # setup config for django runserver
     from .
import config config.SHOW_PROGRESS = False diff --git a/archivebox/search/backends/sonic.py b/archivebox/search/backends/sonic.py index f0beaddd..f3ef6628 100644 --- a/archivebox/search/backends/sonic.py +++ b/archivebox/search/backends/sonic.py @@ -5,7 +5,7 @@ from sonic import IngestClient, SearchClient from archivebox.util import enforce_types from archivebox.config import SEARCH_BACKEND_HOST_NAME, SEARCH_BACKEND_PORT, SEARCH_BACKEND_PASSWORD, SONIC_BUCKET, SONIC_COLLECTION -MAX_SONIC_TEXT_LENGTH = 20000 +MAX_SONIC_TEXT_LENGTH = 2000 @enforce_types def index(snapshot_id: str, texts: List[str]): diff --git a/archivebox/themes/default/add_links.html b/archivebox/themes/default/add_links.html index 0b384f5c..fa8b441f 100644 --- a/archivebox/themes/default/add_links.html +++ b/archivebox/themes/default/add_links.html @@ -68,4 +68,6 @@ {% endblock %} +{% block footer %}{% endblock %} + {% block sidebar %}{% endblock %} diff --git a/archivebox/themes/default/base.html b/archivebox/themes/default/base.html index a70430ea..c6eda60f 100644 --- a/archivebox/themes/default/base.html +++ b/archivebox/themes/default/base.html @@ -1,3 +1,4 @@ +{% load admin_urls %} {% load static %} @@ -7,222 +8,8 @@ Archived Sites - + + {% block extra_head %} @@ -247,38 +34,51 @@ -
-    {% block body %}
-    {% endblock %}
+    {% block body %}
+    {% endblock %}
+    {% block footer %}
+    {% endblock %}
diff --git a/archivebox/themes/default/core/snapshot_list.html b/archivebox/themes/default/core/snapshot_list.html
index ce2b2faa..dd8ebf15 100644
--- a/archivebox/themes/default/core/snapshot_list.html
+++ b/archivebox/themes/default/core/snapshot_list.html
@@ -2,13 +2,21 @@
 {% load static %}
 {% block body %}
diff --git a/archivebox/themes/default/main_index.html b/archivebox/themes/default/main_index.html
index 42150342..7ad0f30d 100644
--- a/archivebox/themes/default/main_index.html
+++ b/archivebox/themes/default/main_index.html
@@ -243,7 +243,7 @@
Archive created using ArchiveBox - version v{{VERSION}}   |   + version v{{version}}   |   Download index as JSON

{{FOOTER_INFO}} diff --git a/archivebox/themes/default/main_index_row.html b/archivebox/themes/default/main_index_row.html index 66c297a7..984a0432 100644 --- a/archivebox/themes/default/main_index_row.html +++ b/archivebox/themes/default/main_index_row.html @@ -1,12 +1,14 @@ {% load static %}
+{% comment %} + +{% endcomment %} + + + + + diff --git a/archivebox/themes/default/static/add.css b/archivebox/themes/default/static/add.css index b128bf4b..875c61bc 100644 --- a/archivebox/themes/default/static/add.css +++ b/archivebox/themes/default/static/add.css @@ -1,3 +1,13 @@ +header { + font-family: "Roboto","Lucida Grande","DejaVu Sans","Bitstream Vera Sans",Verdana,Arial,sans-serif; + font-size: 13px; + color: white; + height: 30px; +} +.header-top { + color: white; +} + .dashboard #content { width: 100%; margin-right: 0px; @@ -60,3 +70,21 @@ ul#id_depth { box-sizing: border-box; animation: spin 2s linear infinite; } + + +textarea, select { + border-radius: 4px; + border: 2px solid #004882; + box-shadow: 4px 4px 4px rgba(0,0,0,0.02); + width: 100%; +} + +select option:not(:checked) { + border: 1px dashed rgba(10,200,20,0.12); +} +select option:checked { + border: 1px solid green; + background-color: green; + color: green; +} + diff --git a/archivebox/themes/default/static/admin.css b/archivebox/themes/default/static/admin.css index 181c06de..142e1b89 100644 --- a/archivebox/themes/default/static/admin.css +++ b/archivebox/themes/default/static/admin.css @@ -224,7 +224,7 @@ body.model-snapshot.change-list #content .object-tools { 100% { transform: rotate(360deg); } } -.tags > a > .tag { +.tag { float: right; border-radius: 5px; background-color: #bfdfff; @@ -232,3 +232,8 @@ body.model-snapshot.change-list #content .object-tools { margin-left: 4px; margin-top: 1px; } + +.exists-False { + opacity: 0.1; + filter: grayscale(100%); +} diff --git a/bin/build_docker.sh b/bin/build_docker.sh index 0115acdf..42fade38 100755 --- a/bin/build_docker.sh +++ b/bin/build_docker.sh @@ -12,6 +12,7 @@ IFS=$'\n' REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )" VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")" +SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')" cd "$REPO_DIR" which docker > /dev/null @@ -20,9 +21,13 @@ echo "[+] Building docker image in the background..." docker build . -t archivebox \ -t archivebox:latest \ -t archivebox:$VERSION \ + -t archivebox:$SHORT_VERSION \ -t docker.io/nikisweeting/archivebox:latest \ -t docker.io/nikisweeting/archivebox:$VERSION \ + -t docker.io/nikisweeting/archivebox:$SHORT_VERSION \ -t docker.io/archivebox/archivebox:latest \ -t docker.io/archivebox/archivebox:$VERSION \ + -t docker.io/archivebox/archivebox:$SHORT_VERSION \ -t docker.pkg.github.com/pirate/archivebox/archivebox:latest \ - -t docker.pkg.github.com/pirate/archivebox/archivebox:$VERSION + -t docker.pkg.github.com/pirate/archivebox/archivebox:$VERSION \ + -t docker.pkg.github.com/pirate/archivebox/archivebox:$SHORT_VERSION diff --git a/bin/build_docs.sh b/bin/build_docs.sh index afc849ed..5fa220fb 100755 --- a/bin/build_docs.sh +++ b/bin/build_docs.sh @@ -20,7 +20,6 @@ fi cd "$REPO_DIR" - echo "[*] Fetching latest docs version" cd "$REPO_DIR/docs" git pull diff --git a/bin/build_git.sh b/bin/build_git.sh new file mode 100644 index 00000000..19e185e8 --- /dev/null +++ b/bin/build_git.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +### Bash Environment Setup +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html +# set -o xtrace +set -o errexit +set -o errtrace +set -o nounset +set -o pipefail +IFS=$'\n' + +REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. 
&& pwd )" + +cd "$REPO_DIR" +source "./.venv/bin/activate" + + +# Make sure git is clean +if [ -z "$(git status --porcelain)" ] && [[ "$(git branch --show-current)" == "master" ]]; then + git pull +else + echo "[!] Warning: git status is dirty!" + echo " Press Ctrl-C to cancel, or wait 10sec to continue..." + sleep 10 +fi + +# Bump version number in source +function bump_semver { + echo "$1" | awk -F. '{$NF = $NF + 1;} 1' | sed 's/ /./g' +} + +OLD_VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")" +NEW_VERSION="$(bump_semver "$OLD_VERSION")" +echo "[*] Bumping VERSION from $OLD_VERSION to $NEW_VERSION" +contents="$(jq ".version = \"$NEW_VERSION\"" "$REPO_DIR/package.json")" && \ +echo "${contents}" > package.json + diff --git a/bin/docker_entrypoint.sh b/bin/docker_entrypoint.sh index 29fcb646..65a4c1f6 100755 --- a/bin/docker_entrypoint.sh +++ b/bin/docker_entrypoint.sh @@ -1,24 +1,33 @@ #!/usr/bin/env bash -# Autodetect UID,GID of host user based on ownership of files in the data volume DATA_DIR="${DATA_DIR:-/data}" ARCHIVEBOX_USER="${ARCHIVEBOX_USER:-archivebox}" -USID=$(stat --format="%u" "$DATA_DIR") -GRID=$(stat --format="%g" "$DATA_DIR") - -# If user is not root, modify the archivebox user+files to have the same uid,gid -if [[ "$USID" != 0 && "$GRID" != 0 ]]; then - usermod -u "$USID" "$ARCHIVEBOX_USER" > /dev/null 2>&1 - groupmod -g "$GRID" "$ARCHIVEBOX_USER" > /dev/null 2>&1 - chown -R "$USID":"$GRID" "/home/$ARCHIVEBOX_USER" - chown "$USID":"$GRID" "$DATA_DIR" - chown "$USID":"$GRID" "$DATA_DIR/*" > /dev/null 2>&1 || true +# Set the archivebox user UID & GID +if [[ -n "$PUID" && "$PUID" != 0 ]]; then + usermod -u "$PUID" "$ARCHIVEBOX_USER" > /dev/null 2>&1 +fi +if [[ -n "$PGID" && "$PGID" != 0 ]]; then + groupmod -g "$PGID" "$ARCHIVEBOX_USER" > /dev/null 2>&1 fi -# Run commands as the new archivebox user in Docker. -# Any files touched will have the same uid & gid -# inside Docker and outside on the host machine. +# Set the permissions of the data dir to match the archivebox user +if [[ -d "$DATA_DIR/archive" ]]; then + # check data directory permissions + if [[ ! "$(stat -c %u $DATA_DIR/archive)" = "$(id -u archivebox)" ]]; then + echo "Change in ownership detected, please be patient while we chown existing files" + echo "This could take some time..." + chown $ARCHIVEBOX_USER:$ARCHIVEBOX_USER -R "$DATA_DIR" + fi +else + # create data directory + mkdir -p "$DATA_DIR" + chown -R $ARCHIVEBOX_USER:$ARCHIVEBOX_USER "$DATA_DIR" +fi +chown $ARCHIVEBOX_USER:$ARCHIVEBOX_USER "$DATA_DIR" + + +# Drop permissions to run commands as the archivebox user if [[ "$1" == /* || "$1" == "echo" || "$1" == "archivebox" ]]; then # arg 1 is a binary, execute it verbatim # e.g. "archivebox init" diff --git a/bin/release.sh b/bin/release.sh index 12459c74..34256fad 100755 --- a/bin/release.sh +++ b/bin/release.sh @@ -11,69 +11,28 @@ set -o pipefail IFS=$'\n' REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )" - cd "$REPO_DIR" -source "./.venv/bin/activate" -# Make sure git is clean -if [ -z "$(git status --porcelain)" ] && [[ "$(git branch --show-current)" == "master" ]]; then - git pull -else - echo "[!] Warning: git status is dirty!" - echo " Press Ctrl-C to cancel, or wait 10sec to continue..." - sleep 10 -fi +# Run the linters and tests +# ./bin/lint.sh +# ./bin/test.sh - -# Bump version number in source -function bump_semver { - echo "$1" | awk -F. 
'{$NF = $NF + 1;} 1' | sed 's/ /./g' -} - -OLD_VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")" -NEW_VERSION="$(bump_semver "$OLD_VERSION")" -echo "[*] Bumping VERSION from $OLD_VERSION to $NEW_VERSION" -contents="$(jq ".version = \"$NEW_VERSION\"" "$REPO_DIR/package.json")" && \ -echo "${contents}" > package.json - - -# Build docs, python package, and docker image +# Run all the build scripts +./bin/build_git.sh ./bin/build_docs.sh ./bin/build_pip.sh ./bin/build_deb.sh +./bin/build_brew.sh ./bin/build_docker.sh +# Push relase to public repositories +./bin/release_git.sh +./bin/release_docs.sh +./bin/release_pip.sh +./bin/release_deb.sh +./bin/release_brew.sh +./bin/release_docker.sh -# Push build to github -echo "[^] Pushing source to github" -git add "$REPO_DIR/docs" -git add "$REPO_DIR/deb_dist" -git add "$REPO_DIR/pip_dist" -git add "$REPO_DIR/brew_dist" -git add "$REPO_DIR/package.json" -git add "$REPO_DIR/package-lock.json" -git commit -m "$NEW_VERSION release" -git tag -a "v$NEW_VERSION" -m "v$NEW_VERSION" -git push origin master -git push origin --tags - - -# Push releases to github -echo "[^] Uploading to test.pypi.org" -python3 -m twine upload --repository testpypi pip_dist/*.{whl,tar.gz} - -echo "[^] Uploading to pypi.org" -python3 -m twine upload --repository pypi pip_dist/*.{whl,tar.gz} - -echo "[^] Uploading to launchpad.net" -dput archivebox "deb_dist/archivebox_${NEW_VERSION}-1_source.changes" - -echo "[^] Uploading docker image" -# docker login --username=nikisweeting -# docker login docker.pkg.github.com --username=pirate -docker push docker.io/nikisweeting/archivebox -docker push docker.io/archivebox/archivebox -docker push docker.pkg.github.com/archivebox/archivebox/archivebox - -echo "[√] Done. Published version v$NEW_VERSION" +VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")" +echo "[√] Done. Published version v$VERSION" diff --git a/bin/release_brew.sh b/bin/release_brew.sh new file mode 100644 index 00000000..526d9d59 --- /dev/null +++ b/bin/release_brew.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +### Bash Environment Setup +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html +# set -o xtrace +set -o errexit +set -o errtrace +set -o nounset +set -o pipefail +IFS=$'\n' + +REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )" +VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")" +SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')" +cd "$REPO_DIR" + +# TODO +exit 0 diff --git a/bin/release_deb.sh b/bin/release_deb.sh new file mode 100644 index 00000000..dc1bff35 --- /dev/null +++ b/bin/release_deb.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +### Bash Environment Setup +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html +# set -o xtrace +set -o errexit +set -o errtrace +set -o nounset +set -o pipefail +IFS=$'\n' + +REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. 
&& pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
+cd "$REPO_DIR"
+
+
+echo "[^] Uploading to launchpad.net"
+dput archivebox "deb_dist/archivebox_${VERSION}-1_source.changes"
diff --git a/bin/release_docker.sh b/bin/release_docker.sh
new file mode 100644
index 00000000..344a456d
--- /dev/null
+++ b/bin/release_docker.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
+cd "$REPO_DIR"
+
+
+echo "[^] Uploading docker image"
+# docker login --username=nikisweeting
+# docker login docker.pkg.github.com --username=pirate
+docker push docker.io/nikisweeting/archivebox
+docker push docker.io/archivebox/archivebox
+docker push docker.pkg.github.com/archivebox/archivebox/archivebox
diff --git a/bin/release_docs.sh b/bin/release_docs.sh
new file mode 100644
index 00000000..114c1262
--- /dev/null
+++ b/bin/release_docs.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
+VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
+SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
+cd "$REPO_DIR"
+
+
+echo "[^] Pushing docs to github"
+cd docs/
+git commit -am "$VERSION release"
+git push
+git tag -a "v$VERSION" -m "v$VERSION"
+git push origin master
+git push origin --tags
diff --git a/bin/release_git.sh b/bin/release_git.sh
new file mode 100644
index 00000000..4a999e34
--- /dev/null
+++ b/bin/release_git.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+### Bash Environment Setup
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
+# set -o xtrace
+set -o errexit
+set -o errtrace
+set -o nounset
+set -o pipefail
+IFS=$'\n'
+
+REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd ..
&& pwd )" +VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")" +cd "$REPO_DIR" + + +# Push build to github +echo "[^] Pushing release commit + tag to Github" +git commit -am "$VERSION release" +git tag -a "v$VERSION" -m "v$VERSION" +git push origin master +git push origin --tags +echo " To finish publishing the release go here:" +echo " https://github.com/ArchiveBox/ArchiveBox/releases/new" diff --git a/bin/release_pip.sh b/bin/release_pip.sh new file mode 100644 index 00000000..87323603 --- /dev/null +++ b/bin/release_pip.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +### Bash Environment Setup +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html +# set -o xtrace +set -o errexit +set -o errtrace +set -o nounset +set -o pipefail +IFS=$'\n' + +REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )" +VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")" +cd "$REPO_DIR" + + +# apt install python3 python3-all python3-dev +# pip install '.[dev]' + + +echo "[^] Uploading to test.pypi.org" +python3 -m twine upload --repository testpypi pip_dist/archivebox-${VERSION}*.{whl,tar.gz} + +echo "[^] Uploading to pypi.org" +python3 -m twine upload --repository pypi pip_dist/archivebox-${VERSION}*.{whl,tar.gz} diff --git a/bin/setup.sh b/bin/setup.sh index e87c9571..304c96c5 100755 --- a/bin/setup.sh +++ b/bin/setup.sh @@ -1,6 +1,5 @@ -#!/bin/bash +#!/usr/bin/env bash # ArchiveBox Setup Script -# Nick Sweeting 2017 | MIT License # https://github.com/ArchiveBox/ArchiveBox echo "[i] ArchiveBox Setup Script 📦" @@ -8,27 +7,28 @@ echo "" echo " This is a helper script which installs the ArchiveBox dependencies on your system using homebrew/aptitude." echo " You may be prompted for a password in order to install the following:" echo "" -echo " - git" echo " - python3, python3-pip, python3-distutils" echo " - curl" echo " - wget" +echo " - git" echo " - youtube-dl" echo " - chromium-browser (skip this if Chrome/Chromium is already installed)" +echo " - nodejs (used for singlefile, readability, mercury, and more)" echo "" echo " If you'd rather install these manually, you can find documentation here:" echo " https://github.com/ArchiveBox/ArchiveBox/wiki/Install" echo "" -echo "Press enter to continue with the automatic install, or Ctrl+C to cancel..." -read - +read -p "Press [enter] to continue with the automatic install, or Ctrl+C to cancel..." REPLY echo "" # On Linux: if which apt-get > /dev/null; then - echo "[+] Updating apt repos..." - apt update -q + echo "[+] Adding ArchiveBox apt repo to sources..." + sudo apt install software-properties-common + sudo add-apt-repository -u ppa:archivebox/archivebox echo "[+] Installing python3, wget, curl..." - apt install git python3 python3-pip python3-distutils wget curl youtube-dl + sudo apt install -y git python3 python3-pip python3-distutils wget curl youtube-dl nodejs npm ripgrep + # sudo apt install archivebox if which google-chrome; then echo "[i] You already have google-chrome installed, if you would like to download chromium instead (they work pretty much the same), follow the Manual Setup instructions" @@ -41,13 +41,13 @@ if which apt-get > /dev/null; then chromium --version else echo "[+] Installing chromium..." - apt install chromium + sudo apt install chromium || sudo apt install chromium-browser fi # On Mac: elif which brew > /dev/null; then # 🐍 eye of newt echo "[+] Installing python3, wget, curl (ignore 'already installed' warnings)..." 
- brew install git wget curl youtube-dl + brew install git wget curl youtube-dl ripgrep node if which python3; then if python3 -c 'import sys; raise SystemExit(sys.version_info < (3,5,0))'; then echo "[√] Using existing $(which python3)..." @@ -83,7 +83,11 @@ else exit 1 fi -python3 -m pip install --upgrade archivebox +npm i -g npm +pip3 install --upgrade pip setuptools + +pip3 install --upgrade archivebox +npm install -g 'git+https://github.com/ArchiveBox/ArchiveBox.git' # Check: echo ""
{% if snapshot.bookmarked_date %} {{ snapshot.bookmarked_date }} {% else %} {{ snapshot.added }} {% endif %} {% if snapshot.is_archived %} - + {% else %} - + {% endif %} {{snapshot.title|default:'Loading...'}} @@ -19,4 +21,39 @@ {{snapshot.url}} {% if snapshot.bookmarked_date %} {{ snapshot.bookmarked_date }} {% else %} {{ snapshot.added }} {% endif %} + {% if snapshot.is_archived %} + + {% else %} + + {% endif %} + + + {{snapshot.title|default:'Loading...'}} + {% if snapshot.tags_str %} + + {% if snapshot.tags_str != None %} + {{snapshot.tags_str|default:''}} + {% else %} + {{ snapshot.tags|default:'' }} + {% endif %} + + {% endif %} + + + + {% if snapshot.icons %} + {{snapshot.icons}} {{snapshot.num_outputs}} + {% else %} + 📄 + {{snapshot.num_outputs}} + + {% endif %} + + {{snapshot.url}}