diff --git a/archivebox/index/__init__.py b/archivebox/index/__init__.py
index bf1d0c6a..53ce3f26 100644
--- a/archivebox/index/__init__.py
+++ b/archivebox/index/__init__.py
@@ -243,12 +243,6 @@ def write_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR) -> None:
     log_indexing_process_finished()
 
 
-@enforce_types
-def get_empty_snapshot_queryset(out_dir: Path=OUTPUT_DIR):
-    setup_django(out_dir, check_db=True)
-    from core.models import Snapshot
-    return Snapshot.objects.none()
-
 @enforce_types
 def load_main_index(out_dir: Path=OUTPUT_DIR, warn: bool=True) -> List[Link]:
     """parse and load existing index with any new links from import_path merged in"""
@@ -390,8 +384,9 @@ def search_filter(snapshots: QuerySet, filter_patterns: List[str], filter_type:
             color='red',
         )
         raise SystemExit(2)
+    from core.models import Snapshot
 
-    qsearch = get_empty_snapshot_queryset()
+    qsearch = Snapshot.objects.none()
     for pattern in filter_patterns:
         try:
             qsearch |= query_search_index(pattern)
diff --git a/archivebox/main.py b/archivebox/main.py
index 756fecde..eb8cd6a0 100644
--- a/archivebox/main.py
+++ b/archivebox/main.py
@@ -29,7 +29,6 @@ from .util import enforce_types                         # type: ignore
 from .system import get_dir_size, dedupe_cron_jobs, CRON_COMMENT
 from .index import (
     load_main_index,
-    get_empty_snapshot_queryset,
     parse_links_from_source,
     dedupe_links,
     write_main_index,
@@ -265,6 +264,7 @@ def run(subcommand: str,
 @enforce_types
 def init(force: bool=False, out_dir: Path=OUTPUT_DIR) -> None:
     """Initialize a new ArchiveBox collection in the current directory"""
+    from core.models import Snapshot
 
     Path(out_dir).mkdir(exist_ok=True)
     is_empty = not len(set(os.listdir(out_dir)) - ALLOWED_IN_OUTPUT_DIR)
@@ -335,7 +335,7 @@ def init(force: bool=False, out_dir: Path=OUTPUT_DIR) -> None:
     print()
     print('{green}[*] Collecting links from any existing indexes and archive folders...{reset}'.format(**ANSI))
 
-    all_links = get_empty_snapshot_queryset()
+    all_links = Snapshot.objects.none()
     pending_links: Dict[str, Link] = {}
 
     if existing_index: