ArchiveBox/archivebox/index/json.py

__package__ = 'archivebox.index'
import os
import sys
import json as pyjson
from pathlib import Path
from datetime import datetime
from typing import List, Optional, Iterator, Any, Union
from django.db.models import Model
from .schema import Link
from ..system import atomic_write
from ..util import enforce_types
from ..config import (
    VERSION,
    OUTPUT_DIR,
    FOOTER_INFO,
    GIT_SHA,
    DEPENDENCIES,
    JSON_INDEX_FILENAME,
    ARCHIVE_DIR_NAME,
    ANSI,
)

MAIN_INDEX_HEADER = {
    'info': 'This is an index of site data archived by ArchiveBox: The self-hosted web archive.',
    'schema': 'archivebox.index.json',
    'copyright_info': FOOTER_INFO,
    'meta': {
        'project': 'ArchiveBox',
        'version': VERSION,
        'git_sha': GIT_SHA,
        'website': 'https://ArchiveBox.io',
        'docs': 'https://github.com/ArchiveBox/ArchiveBox/wiki',
        'source': 'https://github.com/ArchiveBox/ArchiveBox',
        'issues': 'https://github.com/ArchiveBox/ArchiveBox/issues',
        'dependencies': DEPENDENCIES,
    },
}

@enforce_types
def generate_json_index_from_snapshots(snapshots: List[Model], with_headers: bool):
    snapshots_json = [snapshot.as_json() for snapshot in snapshots]

    if with_headers:
        output = {
            **MAIN_INDEX_HEADER,
            'num_links': len(snapshots),
            'updated': datetime.now(),
            'last_run_cmd': sys.argv,
            'links': snapshots_json,
        }
    else:
        output = snapshots_json

    return to_json(output, indent=4, sort_keys=True)
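
# --- Illustrative sketch (not part of the original module) ---
# Regenerate the main index JSON from the database and write it to disk.
# Assumes Django is configured and that core.models.Snapshot provides
# .as_json(), as relied on by generate_json_index_from_snapshots() above.
def _example_write_main_index(out_dir: Path=OUTPUT_DIR) -> None:
    from core.models import Snapshot

    snapshots = list(Snapshot.objects.all())
    index_json = generate_json_index_from_snapshots(snapshots, with_headers=True)
    (Path(out_dir) / JSON_INDEX_FILENAME).write_text(index_json, encoding='utf-8')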

@enforce_types
def parse_json_main_index(out_dir: Path=OUTPUT_DIR) -> Iterator[Link]:
    """parse an archive index json file and return the list of links"""

    index_path = Path(out_dir) / JSON_INDEX_FILENAME
    if index_path.exists():
        with open(index_path, 'r', encoding='utf-8') as f:
            links = pyjson.load(f)['links']
            for link_json in links:
                try:
                    yield Link.from_json(link_json)
                except KeyError:
                    try:
                        detail_index_path = Path(OUTPUT_DIR) / ARCHIVE_DIR_NAME / link_json['timestamp']
                        # fall back to the snapshot's detail index in its archive folder
                        # (this call referenced parse_json_link_details(), which no longer
                        # exists in this module after the Snapshot rename)
                        yield load_json_snapshot(detail_index_path)
                    except KeyError:
                        # as a last effort, try to guess the missing values out of existing ones
                        try:
                            yield Link.from_json(link_json, guess=True)
                        except KeyError:
                            print(" {lightyellow}! Failed to load the index.json from {}".format(detail_index_path, **ANSI))
                            continue
    return ()
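
# --- Illustrative sketch (not part of the original module) ---
# Iterate the main index and print each parsed Link; assumes an index.json
# already exists under out_dir.
def _example_print_main_index(out_dir: Path=OUTPUT_DIR) -> None:
    for link in parse_json_main_index(out_dir):
        print(link.timestamp, link.url)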

### Link Details Index

@enforce_types
def write_json_snapshot_details(snapshot: Model, out_dir: Optional[str]=None) -> None:
    """write a json file with some info about the snapshot"""

    out_dir = out_dir or snapshot.snapshot_dir
    path = Path(out_dir) / JSON_INDEX_FILENAME
    atomic_write(str(path), snapshot.as_json())

@enforce_types
def load_json_snapshot(out_dir: Path) -> Optional[Model]:
    """load the Snapshot details from the local JSON index in the given folder"""

    from core.models import Snapshot

    existing_index = Path(out_dir) / JSON_INDEX_FILENAME
    if existing_index.exists():
        with open(existing_index, 'r', encoding='utf-8') as f:
            try:
                output = pyjson.load(f)
                output = Snapshot.from_json(output)
                return output
            except pyjson.JSONDecodeError:
                pass
    return None
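
# --- Illustrative sketch (not part of the original module) ---
# Round-trip a single snapshot's detail index: write it with
# write_json_snapshot_details(), then read it back with load_json_snapshot().
# Assumes `snapshot` is a core.models.Snapshot with .snapshot_dir and
# .as_json(), as relied on by the functions above.
def _example_snapshot_roundtrip(snapshot: Model) -> Optional[Model]:
    write_json_snapshot_details(snapshot, out_dir=str(snapshot.snapshot_dir))
    return load_json_snapshot(Path(snapshot.snapshot_dir))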

@enforce_types
def parse_json_snapshot_details(out_dir: Union[Path, str]) -> Iterator[Model]:
    """read through all the archive data folders and return the parsed snapshots"""

    for entry in os.scandir(Path(out_dir) / ARCHIVE_DIR_NAME):
        if entry.is_dir(follow_symlinks=True):
            if (Path(entry.path) / 'index.json').exists():
                try:
                    snapshot_details = load_json_snapshot(Path(entry.path))
                except KeyError:
                    snapshot_details = None

                if snapshot_details:
                    yield snapshot_details
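
# --- Illustrative sketch (not part of the original module) ---
# Count how many folders under out_dir/archive/ have a loadable index.json.
def _example_count_snapshot_details(out_dir: Union[Path, str]=OUTPUT_DIR) -> int:
    return sum(1 for _ in parse_json_snapshot_details(out_dir))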

### Helpers

class ExtendedEncoder(pyjson.JSONEncoder):
    """
    Extended json serializer that supports serializing several model
    fields and objects
    """

    def default(self, obj):
        cls_name = obj.__class__.__name__

        if hasattr(obj, '_asdict'):
            return obj._asdict()
        elif isinstance(obj, bytes):
            return obj.decode()
        elif isinstance(obj, datetime):
            return obj.isoformat()
        elif isinstance(obj, Exception):
            return '{}: {}'.format(obj.__class__.__name__, obj)
        elif cls_name in ('dict_items', 'dict_keys', 'dict_values'):
            return tuple(obj)

        return pyjson.JSONEncoder.default(self, obj)

@enforce_types
def to_json(obj: Any, indent: Optional[int]=4, sort_keys: bool=True, cls=ExtendedEncoder) -> str:
    return pyjson.dumps(obj, indent=indent, sort_keys=sort_keys, cls=cls)
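
# --- Illustrative sketch (not part of the original module) ---
# ExtendedEncoder lets to_json() serialize values the stdlib encoder rejects,
# e.g. datetimes, bytes, and exceptions.
def _example_to_json() -> str:
    return to_json({
        'updated': datetime.now(),       # serialized via .isoformat()
        'stdout': b'done\n',             # decoded to str
        'error': ValueError('bad URL'),  # rendered as 'ValueError: bad URL'
    })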