
fix sql link parsing and writing

Nick Sweeting 2019-04-24 04:06:54 -04:00
parent ce2d18644b
commit 0b27f33d2e
3 changed files with 6 additions and 4 deletions


@@ -245,10 +245,12 @@ def load_main_index(out_dir: str=OUTPUT_DIR, warn: bool=True) -> List[Link]:
     all_links: List[Link] = []
     all_links = list(parse_json_main_index(out_dir))
-    links_from_sql = list(parse_sql_main_index())
+    links_from_sql = list(parse_sql_main_index(out_dir))
-    if warn and not set(l.url for l in all_links) == set(l['url'] for l in links_from_sql):
+    if warn and not set(l.url for l in all_links) == set(l.url for l in links_from_sql):
         stderr('{red}[!] Warning: SQL index does not match JSON index!{reset}'.format(**ANSI))
         stderr('    To repair the index and re-import any orphaned links run:')
         stderr('        archivebox init')
     return all_links
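
For context, the old comparison treated the SQL-side entries as dicts (l['url']) while the JSON-side entries were Link objects (l.url); after this change both sides are Link objects. Below is a minimal sketch of the corrected check using a hypothetical indexes_match() helper and a stripped-down Link type with only a url field (the real ArchiveBox Link carries many more fields):

from dataclasses import dataclass
from typing import List

@dataclass(frozen=True)
class Link:
    url: str  # simplified stand-in; the real Link also has timestamp, title, etc.

def indexes_match(json_links: List[Link], sql_links: List[Link]) -> bool:
    # Both sides now use attribute access (l.url), so the warning only fires
    # when the two indexes genuinely contain different sets of URLs.
    return {l.url for l in json_links} == {l.url for l in sql_links}

# usage sketch:
assert indexes_match([Link('https://example.com')], [Link('https://example.com')])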


@@ -60,7 +60,7 @@ def log_indexing_process_started(num_links: int):
     start_ts = datetime.now()
     _LAST_RUN_STATS.index_start_ts = start_ts
     print()
-    print('{green}[*] [{}] Updating {} links in main index...{reset}'.format(
+    print('{green}[*] [{}] Writing {} links to main index...{reset}'.format(
         start_ts.strftime('%Y-%m-%d %H:%M:%S'),
         num_links,
         **ANSI,


@@ -15,7 +15,7 @@ def parse_sql_main_index(out_dir: str=OUTPUT_DIR) -> Iterator[Link]:
     from core.models import Page
     return (
-        page.as_json(*Page.keys)
+        Link.from_json(page.as_json(*Page.keys))
         for page in Page.objects.all()
     )
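
With this change parse_sql_main_index() yields Link objects (via Link.from_json()) instead of raw dicts, matching what parse_json_main_index() returns and making the l.url comparison in load_main_index() valid on both sides. A runnable stand-in (not the real Django ORM code) illustrating the same pattern, with a hypothetical parse_rows() helper and a simplified Link.from_json():

from dataclasses import dataclass
from typing import Dict, Iterator, List

@dataclass
class Link:
    url: str
    timestamp: str

    @classmethod
    def from_json(cls, info: Dict[str, str]) -> 'Link':
        # simplified: the real Link.from_json() handles many more fields
        return cls(url=info['url'], timestamp=info['timestamp'])

def parse_rows(rows: List[Dict[str, str]]) -> Iterator[Link]:
    # Same shape as the fixed generator above: wrap each row in Link.from_json()
    # so callers receive Link objects rather than plain dicts.
    return (Link.from_json(row) for row in rows)

# usage sketch:
links = list(parse_rows([{'url': 'https://example.com', 'timestamp': '1556093214'}]))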