from typing import Iterable, Optional
from collections import OrderedDict

from core.schema import Link
from core.util import (
    scheme,
    fuzzy_url,
    merge_links,
)
from core.config import URL_BLACKLIST_PTN


def validate_links(links: Iterable[Link]) -> Iterable[Link]:
    links = archivable_links(links)  # remove chrome://, about:, mailto:, etc.
    links = sorted_links(links)      # deterministically sort the links by timestamp, then url
    links = uniquefied_links(links)  # merge/dedupe duplicate timestamps & urls

    if not links:
        print('[X] No links found :(')
        raise SystemExit(1)

    return links

def archivable_links(links: Iterable[Link]) -> Iterable[Link]:
    """remove chrome://, about:, or other schemed links that can't be archived"""
    for link in links:
        scheme_is_valid = scheme(link.url) in ('http', 'https', 'ftp')
        # URL_BLACKLIST_PTN is a compiled regex, or None when no blacklist is configured
        not_blacklisted = (not URL_BLACKLIST_PTN.match(link.url)) if URL_BLACKLIST_PTN else True
        if scheme_is_valid and not_blacklisted:
            yield link
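
# Illustrative filtering (a sketch): given links whose urls are
#   'https://example.com', 'chrome://settings', and 'mailto:someone@example.com',
# only the https link is yielded; anything matching URL_BLACKLIST_PTN
# (if a blacklist is configured) is dropped as well.
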
def uniquefied_links(sorted_links: Iterable[Link]) -> Iterable[Link]:
    """
    ensures that all non-duplicate links have monotonically increasing timestamps
    """
    # first pass: dedupe by fuzzy url, merging the history of any duplicates
    unique_urls: OrderedDict[str, Link] = OrderedDict()
    for link in sorted_links:
        fuzzy = fuzzy_url(link.url)
        if fuzzy in unique_urls:
            # merge with any other links that share the same url
            link = merge_links(unique_urls[fuzzy], link)
        unique_urls[fuzzy] = link

    # second pass: make every timestamp unique by appending a .N suffix to collisions
    unique_timestamps: OrderedDict[str, Link] = OrderedDict()
    for link in unique_urls.values():
        new_link = link.overwrite(
            timestamp=lowest_uniq_timestamp(unique_timestamps, link.timestamp),
        )
        unique_timestamps[new_link.timestamp] = new_link

    return unique_timestamps.values()
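
# Illustrative behavior (a sketch, assuming fuzzy_url treats these as the same page):
# two links for 'http://example.com' and 'https://example.com' that share timestamp
# '1556742000' would be merged into a single Link, and a remaining timestamp
# collision would be resolved by lowest_uniq_timestamp as '1556742000', '1556742000.0'.
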
def sorted_links(links: Iterable[Link]) -> Iterable[Link]:
    # sort newest-first by timestamp (ignoring any .N dedupe suffix), then by url
    sort_func = lambda link: (link.timestamp.split('.', 1)[0], link.url)
    return sorted(links, key=sort_func, reverse=True)

def links_after_timestamp(links: Iterable[Link], resume: Optional[float]=None) -> Iterable[Link]:
    if not resume:
        yield from links
        return

    for link in links:
        try:
            # links are sorted newest-first, so resume by skipping links newer than the cutoff
            if float(link.timestamp) <= resume:
                yield link
        except (ValueError, TypeError):
            print('Resume value and all timestamp values must be valid numbers.')

def lowest_uniq_timestamp(used_timestamps: OrderedDict, timestamp: str) -> str:
    """resolve duplicate timestamps by appending a decimal: 1234, 1234, 1234 -> 1234, 1234.0, 1234.1"""

    timestamp = timestamp.split('.')[0]
    nonce = 0

    # first try 152323423 before 152323423.0
    if timestamp not in used_timestamps:
        return timestamp

    new_timestamp = '{}.{}'.format(timestamp, nonce)
    while new_timestamp in used_timestamps:
        nonce += 1
        new_timestamp = '{}.{}'.format(timestamp, nonce)

    return new_timestamp
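
# A minimal smoke test for the timestamp de-duplication above (an illustrative
# sketch, not part of the module's public API):
if __name__ == '__main__':
    used: OrderedDict = OrderedDict()
    for expected in ('1234', '1234.0', '1234.1'):
        new_ts = lowest_uniq_timestamp(used, '1234')
        assert new_ts == expected, (new_ts, expected)
        used[new_ts] = None
    print('lowest_uniq_timestamp:', ', '.join(used))  # -> 1234, 1234.0, 1234.1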