
make get_commit_hash more precise by using exact ref (#1270)

commit 764a483903
Author: Nick Sweeting, 2023-12-18 19:28:10 -08:00 (committed by GitHub)
11 changed files with 1787 additions and 17 deletions


@@ -53,6 +53,7 @@ from .config_stubs import (
### Pre-Fetch Minimal System Config
TIMEZONE = 'UTC'
SYSTEM_USER = getpass.getuser() or os.getlogin()
try:
@@ -81,7 +82,6 @@ CONFIG_SCHEMA: Dict[str, ConfigDefaultDict] = {
'IN_QEMU': {'type': bool, 'default': False},
'PUID': {'type': int, 'default': os.getuid()},
'PGID': {'type': int, 'default': os.getgid()},
# TODO: 'SHOW_HINTS': {'type: bool, 'default': True}, # hints are hidden automatically once collection contains >0 Snapshots, no need to configure
},
'GENERAL_CONFIG': {
@@ -392,10 +392,20 @@ def get_version(config):
    raise Exception('Failed to detect installed archivebox version!')

def get_commit_hash(config) -> Optional[str]:
    try:
        git_dir = config['PACKAGE_DIR'] / '../.git'
        ref = (git_dir / 'HEAD').read_text().strip().split(' ')[-1]
        commit_hash = git_dir.joinpath(ref).read_text().strip()
        return commit_hash
    except Exception:
        pass

    try:
        return list((config['PACKAGE_DIR'] / '../.git/refs/heads/').glob('*'))[0].read_text().strip()
    except Exception:
        return None
        pass

    return None
def get_build_time(config) -> str:
if config['IN_DOCKER']:
@@ -792,6 +802,7 @@ def find_chrome_binary() -> Optional[str]:
# Precedence: Chromium, Chrome, Beta, Canary, Unstable, Dev
# make sure data dir finding precedence order always matches binary finding order
default_executable_paths = (
# '~/Library/Caches/ms-playwright/chromium-*/chrome-mac/Chromium.app/Contents/MacOS/Chromium',
'chromium-browser',
'chromium',
'/Applications/Chromium.app/Contents/MacOS/Chromium',
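
Note: the precedence list above is scanned for the first entry that resolves to a usable executable. A generic sketch of that kind of lookup (not ArchiveBox's exact implementation, which also honors CHROME_BINARY and other config overrides):

    import os
    import shutil
    from typing import Iterable, Optional

    def find_first_executable(candidates: Iterable[str]) -> Optional[str]:
        """Return the first candidate that exists as an executable, else None."""
        for candidate in candidates:
            candidate = os.path.expanduser(candidate)
            if os.path.isabs(candidate):
                # absolute paths: check the file directly
                if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                    return candidate
            else:
                # bare names: search $PATH
                found = shutil.which(candidate)
                if found:
                    return found
        return None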
@@ -1212,7 +1223,7 @@ def check_dependencies(config: ConfigDict=CONFIG, show_help: bool=True) -> None:
if config['USE_YOUTUBEDL'] and config['MEDIA_TIMEOUT'] < 20:
stderr(f'[!] Warning: MEDIA_TIMEOUT is set too low! (currently set to MEDIA_TIMEOUT={config["MEDIA_TIMEOUT"]} seconds)', color='red')
stderr(' Youtube-dl will fail to archive all media if set to less than ~20 seconds.')
stderr(' youtube-dl/yt-dlp will fail to archive any media if set to less than ~20 seconds.')
stderr(' (Setting it somewhere over 60 seconds is recommended)')
stderr()
stderr(' If you want to disable media archiving entirely, set SAVE_MEDIA=False instead:')
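
For context on the change to get_commit_hash() above: instead of grabbing whichever branch file happens to sit in .git/refs/heads/, the new code reads .git/HEAD and follows the exact ref it points to. A standalone sketch of that approach (paths and the detached-HEAD handling here are illustrative, and packed refs are not handled):

    from pathlib import Path
    from typing import Optional

    def read_commit_hash(repo_dir: Path) -> Optional[str]:
        """Best-effort current commit hash of a git checkout, or None."""
        git_dir = repo_dir / '.git'
        try:
            head = (git_dir / 'HEAD').read_text().strip()
            if head.startswith('ref: '):
                ref = head.split(' ', 1)[-1]      # e.g. "refs/heads/dev"
                return (git_dir / ref).read_text().strip()
            return head                           # detached HEAD: file already holds the hash
        except Exception:
            pass

        try:
            # imprecise fallback: first branch ref found on disk
            return next((git_dir / 'refs/heads').glob('*')).read_text().strip()
        except Exception:
            return None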


@@ -48,22 +48,25 @@ class TagInline(admin.TabularInline):
from django.contrib.admin.helpers import ActionForm
from django.contrib.admin.widgets import AutocompleteSelectMultiple
class AutocompleteTags:
model = Tag
search_fields = ['name']
# WIP: commented out because broken by Django 3.1.2 -> 4.0 migration
# class AutocompleteTags:
# model = Tag
# search_fields = ['name']
# name = 'tags'
class AutocompleteTagsAdminStub:
name = 'admin'
# class AutocompleteTagsAdminStub:
# name = 'admin'
class SnapshotActionForm(ActionForm):
tags = forms.ModelMultipleChoiceField(
queryset=Tag.objects.all(),
required=False,
widget=AutocompleteSelectMultiple(
AutocompleteTags(),
AutocompleteTagsAdminStub(),
),
# WIP: commented out because broken by Django 3.1.2 -> 4.0 migration
# widget=AutocompleteSelectMultiple(
# # AutocompleteTags(),
# # AutocompleteTagsAdminStub(),
# ),
)
# TODO: allow selecting actions for specific extractors? is this useful?
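
With the autocomplete widget shelved, the form above still works as a plain action form. For reference, a minimal sketch of how an ActionForm like this is hooked into a ModelAdmin and read back inside an admin action (model import paths and the action body are illustrative, not ArchiveBox's actual SnapshotAdmin):

    from django import forms
    from django.contrib import admin
    from django.contrib.admin.helpers import ActionForm

    from core.models import Snapshot, Tag   # assumed import path


    class SnapshotActionForm(ActionForm):
        tags = forms.ModelMultipleChoiceField(queryset=Tag.objects.all(), required=False)


    class SnapshotAdmin(admin.ModelAdmin):
        action_form = SnapshotActionForm     # renders the extra "tags" field next to the action dropdown
        actions = ('add_tags',)

        @admin.action(description='Add selected tags to selected snapshots')
        def add_tags(self, request, queryset):
            tag_ids = request.POST.getlist('tags')   # pk values submitted via the action form
            for snapshot in queryset:
                snapshot.tags.add(*tag_ids)


    admin.site.register(Snapshot, SnapshotAdmin)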


@@ -3,4 +3,4 @@ from django.apps import AppConfig
class CoreConfig(AppConfig):
name = 'core'
default_auto_field = 'django.db.models.UUIDField'
# default_auto_field = 'django.db.models.UUIDField'


@@ -268,6 +268,8 @@ AUTH_PASSWORD_VALIDATORS = [
{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
]
# WIP: commented out because broken by Django 3.1.2 -> 4.0 migration
# DEFAULT_AUTO_FIELD = 'django.db.models.UUIDField'
################################################################################
### Shell Settings
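
Background on the commented-out line: since Django 3.2, DEFAULT_AUTO_FIELD (and the per-app AppConfig.default_auto_field) must name a subclass of AutoField, so 'django.db.models.UUIDField' is rejected at startup, which is presumably why it is parked here as a WIP comment. A sketch of valid settings, using Django's stock BigAutoField rather than anything ArchiveBox-specific:

    # settings.py: project-wide default primary key type (must subclass AutoField)
    DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'

    # core/apps.py: the same default can instead be set per app
    from django.apps import AppConfig

    class CoreConfig(AppConfig):
        name = 'core'
        default_auto_field = 'django.db.models.BigAutoField'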


@@ -184,7 +184,7 @@ def archive_link(link: Link, overwrite: bool=False, methods: Optional[Iterable[s
link.url,
command,
ts
) + "\n"))
) + "\n" + str(e) + "\n"))
#f.write(f"\n> {command}; ts={ts} version={config['VERSION']} docker={config['IN_DOCKER']} is_tty={config['IS_TTY']}\n")
# print(' ', stats)
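
The one-line change above appends str(e) to the entry written to the per-snapshot error log, so the failing command is stored together with the exception text. A minimal sketch of that pattern (file name, separator, and fields are illustrative):

    from datetime import datetime, timezone
    from pathlib import Path
    from typing import List

    def append_error_log(out_dir: Path, url: str, command: List[str], e: Exception) -> None:
        """Append the failed command, its timestamp, and the exception text to errors.txt."""
        ts = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        with (out_dir / 'errors.txt').open('a', encoding='utf-8') as f:
            f.write('; '.join((
                url,
                ' '.join(command),
                ts,
            )) + "\n" + str(e) + "\n")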


@@ -393,7 +393,11 @@ def log_link_archiving_finished(link: "Link", link_dir: str, is_new: bool, stats
    else:
        _LAST_RUN_STATS.succeeded += 1
    size = get_dir_size(link_dir)
    try:
        size = get_dir_size(link_dir)
    except FileNotFoundError:
        size = (0, None, '0')
    end_ts = datetime.now(timezone.utc)
    duration = str(end_ts - start_ts).split('.')[0]
    print(' {black}{} files ({}) in {}s {reset}'.format(size[2], printable_filesize(size[0]), duration, **ANSI))
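
The try/except above lets the summary line print even when the snapshot directory was never created (e.g. every extractor failed), substituting a (0, None, '0') placeholder. A sketch of a directory-size helper with a matching return shape, tolerant of missing paths; ArchiveBox's real get_dir_size() lives in its util module and may differ in details:

    import os
    from pathlib import Path
    from typing import Optional, Tuple

    def safe_dir_size(path: Path) -> Tuple[int, Optional[int], str]:
        """Return (total_bytes, num_dirs, num_files) for a directory,
        or a harmless placeholder if it does not exist yet."""
        if not path.is_dir():
            return (0, None, '0')

        num_bytes, num_dirs, num_files = 0, 0, 0
        for root, dirs, files in os.walk(path):
            num_dirs += len(dirs)
            num_files += len(files)
            for name in files:
                try:
                    num_bytes += os.path.getsize(os.path.join(root, name))
                except OSError:
                    pass   # file vanished mid-walk; ignore it
        # file count stringified to match the placeholder's shape
        return (num_bytes, num_dirs, str(num_files))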

archivebox/static Symbolic link

@@ -0,0 +1 @@
templates/static


@@ -51,6 +51,7 @@ function create_builder() {
docker buildx use xbuilder && return 0
echo "[+] Creating new xbuilder for: $SELECTED_PLATFORMS"
echo
docker pull 'moby/buildkit:buildx-stable-1'
# Switch to buildx builder if already present / previously created
docker buildx create --name xbuilder --driver docker-container --bootstrap --use --platform "$SELECTED_PLATFORMS" || true
@@ -75,6 +76,7 @@ echo "[+] Generating requirements.txt and pdm.lock from pyproject.toml..."
pdm lock --group=':all' --strategy="cross_platform" --production
pdm export --group=':all' --production --without-hashes -o requirements.txt
echo "[+] Building archivebox:$VERSION docker image..."
# docker builder prune
# docker build . --no-cache -t archivebox-dev \


@@ -21,7 +21,6 @@ services:
# - ./etc/crontabs:/var/spool/cron/crontabs # uncomment this and archivebox_scheduler below to set up automatic recurring archive jobs
# - ./archivebox:/app/archivebox # uncomment this to mount the ArchiveBox source code at runtime (for developers working on archivebox)
# build: . # uncomment this to build the image from source code at buildtime (for developers working on archivebox)
environment:
- ALLOWED_HOSTS=* # restrict this to only accept incoming traffic via specific domain name
# - PUBLIC_INDEX=True # set to False to prevent anonymous users from viewing snapshot list
@@ -161,4 +160,4 @@ networks:
ipam:
driver: default
config:
- subnet: 172.20.0.0/24
- subnet: 172.20.0.0/24

etc/crontabs/archivebox Normal file

@@ -0,0 +1,8 @@
# DO NOT EDIT THIS FILE - edit the master and reinstall.
# (/tmp/tmpe3dawo9u installed on Tue Jun 13 23:21:48 2023)
# (Cron version -- $Id: crontab.c,v 2.13 1994/01/17 03:20:37 vixie Exp $)
@daily cd /data && /usr/local/bin/archivebox add --depth=0 "https://example.com/3" >> /data/logs/schedule.log 2>&1 # archivebox_schedule
@daily cd /data && /usr/local/bin/archivebox add --depth=0 "https://example.com/2" >> /data/logs/schedule.log 2>&1 # archivebox_schedule
@daily cd /data && /usr/local/bin/archivebox add --depth=0 "https://example.com" >> /data/logs/schedule.log 2>&1 # archivebox_schedule
@daily cd /data && /usr/local/bin/archivebox add --depth=0 "update" >> /data/logs/schedule.log 2>&1 # archivebox_schedule

package-lock.json (generated, 1740 lines changed)

File diff suppressed because it is too large.