
tweak console output formatting

Author: Nick Sweeting
Date:   2018-04-17 07:30:06 -04:00
Parent: 4a2e74a3ac
Commit: 3c2e0acb9c

5 changed files with 31 additions and 24 deletions

File 1 of 5: archive (16 lines changed)

@@ -55,7 +55,7 @@ def merge_links(archive_path=HTML_FOLDER, import_path=None):
     num_new_links = len(all_links) - len(existing_links)
     if import_path:
-        print('[*] [{}] Adding {} new links from {} to index'.format(
+        print('[+] [{}] Adding {} new links from {} to index'.format(
             datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
             num_new_links,
             import_path,
@@ -75,6 +75,18 @@ def update_archive(archive_path, links, source=None, resume=None, append=True):
     start_ts = datetime.now().timestamp()
+    if resume:
+        print('{green}[▶] [{}] Resuming archive update from {}...{reset}'.format(
+            datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+            resume,
+            **ANSI,
+        ))
+    else:
+        print('{green}[▶] [{}] Running full archive update...{reset}'.format(
+            datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+            **ANSI,
+        ))
     # loop over links and archive them
     archive_links(archive_path, links, source=source, resume=resume)
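Note on the new banner lines above: str.format mixes auto-numbered '{}' fields (filled by the positional timestamp and resume arguments) with named '{green}'/'{reset}' fields supplied by unpacking the ANSI dict. A minimal standalone illustration; the ANSI values here are an assumed two-color subset, not the project's full color table:

# Illustration of the banner formatting; ANSI values are assumed, not the project's table.
ANSI = {'green': '\033[01;32m', 'reset': '\033[0m'}

print('{green}[▶] [{}] Resuming archive update from {}...{reset}'.format(
    '2018-04-17 07:30:06',   # fills the first '{}' (the timestamp)
    '1523960000.0',          # fills the second '{}' (the resume point)
    **ANSI,                  # fills '{green}' and '{reset}'
))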
@@ -117,7 +129,7 @@ if __name__ == '__main__':
         # argv[1] is a path to a file to import
         source, resume = sys.argv[1].strip(), None
     elif argc == 3:
-        source, resume = sys.argv[1].strip(), sys.argv[1]
+        source, resume = sys.argv[1].strip(), sys.argv[2]
     else:
         print_help()
         raise SystemExit(1)
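The elif branch above is the real bug fix in this file: with three CLI arguments, resume was previously set to sys.argv[1] (the import path) instead of sys.argv[2] (the resume timestamp), so resuming from the command line never took effect. A minimal sketch of the corrected argument handling; the wrapper function is an assumption for illustration, not the file's actual structure:

import sys

def parse_args(argv):
    """Return (source, resume) for: ./archive [import_path] [resume_timestamp]."""
    argc = len(argv)
    if argc == 2:
        # argv[1] is a path to a file to import; start from the beginning
        return argv[1].strip(), None
    elif argc == 3:
        # argv[2] is the timestamp to resume from (the fix: no longer argv[1] twice)
        return argv[1].strip(), argv[2]
    return None, None

source, resume = parse_args(sys.argv)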

File 2 of 5:

@@ -45,18 +45,19 @@ def archive_links(archive_path, links, source=None, resume=None):
     to_archive = Peekable(links_after_timestamp(links, resume))
     idx, link = 0, to_archive.peek(0)
     try:
         for idx, link in enumerate(to_archive):
             link_dir = os.path.join(archive_path, 'archive', link['timestamp'])
             archive_link(link_dir, link)
     except (KeyboardInterrupt, SystemExit, Exception) as e:
-        print('⏸ [{now}] {lightyellow}Downloading paused on link {timestamp} ({idx}/{total}){reset}'.format(
+        print('{lightyellow}[X] [{now}] Downloading paused on link {timestamp} ({idx}/{total}){reset}'.format(
             **ANSI,
             now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-            idx=idx,
+            idx=idx+1,
             timestamp=link['timestamp'],
-            total=len(list(to_archive)),
+            total=len(links),
         ))
         print('    Continue where you left off by running:')
         print('        {} {} {}'.format(
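Two details in the paused-download message above: enumerate() is zero-based, so idx+1 reports how many links were actually attempted, and the total now comes from len(links) rather than len(list(to_archive)). Materializing the Peekable iterator just to count it would consume whatever it has left and only count the not-yet-archived remainder. A standalone illustration of that pitfall, using a plain generator as a stand-in for the project's Peekable wrapper and links_after_timestamp:

# Stand-in filter, not the project's links_after_timestamp / Peekable.
links = [{'timestamp': str(t)} for t in range(5)]

def after(links, timestamp):
    for link in links:
        if float(link['timestamp']) > float(timestamp):
            yield link

to_archive = after(links, '1')      # would yield timestamps 2, 3, 4
next(to_archive)                    # pretend one link was already archived
print(len(list(to_archive)))        # 2 -- counts only what's left, and empties the iterator
print(len(links))                   # 5 -- stable total, what the message now reports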
@@ -105,6 +106,7 @@ def archive_link(link_dir, link, overwrite=False):
         link = fetch_favicon(link_dir, link, overwrite=overwrite)
         write_link_index(link_dir, link)
+        print()
     return link
@@ -115,10 +117,10 @@ def log_link_archive(link_dir, link, update_existing):
         **link,
         **ANSI,
     ))
-    if link['type']:
-        print('    i Type: {}'.format(link['type']))
-    print('    {} ({})'.format(link_dir, 'updating' if update_existing else 'creating'))
+    print('    > {} ({})'.format(link_dir, 'updating' if update_existing else 'creating'))
+    if link['type']:
+        print('      i {}'.format(link['type']))
@@ -141,10 +143,10 @@ def attach_result_to_link(method):
             # if a valid method output is already present, dont run the fetch function
             if link['latest'][method] and not overwrite:
-                print('    √ Skipping: {}'.format(method))
+                print('      {}'.format(method))
                 result = None
             else:
-                print('    - Fetching: {}'.format(method))
+                print('      > {}'.format(method))
                 result = fetch_func(link_dir, link, **kwargs)
             end_ts = datetime.now().timestamp()

File 3 of 5:

@@ -33,7 +33,7 @@ def write_links_index(out_dir, links):
     write_json_links_index(out_dir, links)
     write_html_links_index(out_dir, links)
-    print('[√] [{}] Main archive index now up-to-date: {}/index.html'.format(
+    print('[√] [{}] Archive Main Index now up-to-date: {}/index.html'.format(
         datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
         out_dir,
@@ -110,7 +110,7 @@ def write_json_link_index(out_dir, link):
     path = os.path.join(out_dir, 'index.json')
-    print('    √ Updating: index.json')
+    print('      index.json')
     with open(path, 'w', encoding='utf-8') as f:
         json.dump(link, f, indent=4, default=str)
@@ -131,7 +131,7 @@ def write_html_link_index(out_dir, link):
     path = os.path.join(out_dir, 'index.html')
-    print('    √ Updating: index.html')
+    print('      index.html')
     with open(path, 'w', encoding='utf-8') as f:
         f.write(Template(link_html).substitute({
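A side note on the json.dump call two hunks above: default=str is what lets link values that aren't natively JSON-serializable (datetimes, for instance) be written as strings instead of raising TypeError. A small self-contained example; the link dict here is made up for illustration:

import json
from datetime import datetime

# Hypothetical link record; 'updated' is not JSON-serializable on its own.
link = {'url': 'https://example.com', 'updated': datetime(2018, 4, 17, 7, 30)}

# default=str converts the datetime to '2018-04-17 07:30:00' instead of raising TypeError.
print(json.dumps(link, indent=4, default=str))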

File 4 of 5:

@@ -98,11 +98,6 @@ def links_after_timestamp(links, timestamp=None):
         yield from links
         return
-    print('▶️ [{}] {green}Resuming downloads at {}...{reset}'.format(
-        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-        timestamp,
-        **ANSI,
-    ))
     for link in links:
         try:
             if float(link['timestamp']) <= float(timestamp):

File 5 of 5: util.py (10 lines changed)

@@ -41,8 +41,6 @@ short_ts = lambda ts: ts.split('.')[0]
 def check_dependencies():
     """Check that all necessary dependencies are installed, and have valid versions"""
-    print('[*] Checking Dependencies:')
-
     python_vers = float('{}.{}'.format(sys.version_info.major, sys.version_info.minor))
     if python_vers < 3.5:
         print('{}[X] Python version is not new enough: {} (>3.5 is required){}'.format(ANSI['red'], python_vers, ANSI['reset']))
@@ -50,7 +48,7 @@ def check_dependencies():
         raise SystemExit(1)
     if FETCH_PDF or FETCH_SCREENSHOT:
-        if run(['which', CHROME_BINARY]).returncode:
+        if run(['which', CHROME_BINARY], stdout=DEVNULL).returncode:
            print('{}[X] Missing dependency: {}{}'.format(ANSI['red'], CHROME_BINARY, ANSI['reset']))
            print('    Run ./setup.sh, then confirm it was installed with: {} --version'.format(CHROME_BINARY))
            print('    See https://github.com/pirate/bookmark-archiver for help.')
@@ -74,21 +72,21 @@ def check_dependencies():
             raise SystemExit(1)
     if FETCH_WGET:
-        if run(['which', 'wget']).returncode or run(['wget', '--version'], stdout=DEVNULL).returncode:
+        if run(['which', 'wget'], stdout=DEVNULL).returncode or run(['wget', '--version'], stdout=DEVNULL).returncode:
            print('{red}[X] Missing dependency: wget{reset}'.format(**ANSI))
            print('    Run ./setup.sh, then confirm it was installed with: {} --version'.format('wget'))
            print('    See https://github.com/pirate/bookmark-archiver for help.')
            raise SystemExit(1)
     if FETCH_FAVICON or SUBMIT_ARCHIVE_DOT_ORG:
-        if run(['which', 'curl']).returncode or run(['curl', '--version'], stdout=DEVNULL).returncode:
+        if run(['which', 'curl'], stdout=DEVNULL).returncode or run(['curl', '--version'], stdout=DEVNULL).returncode:
            print('{red}[X] Missing dependency: curl{reset}'.format(**ANSI))
            print('    Run ./setup.sh, then confirm it was installed with: {} --version'.format('curl'))
            print('    See https://github.com/pirate/bookmark-archiver for help.')
            raise SystemExit(1)
     if FETCH_AUDIO or FETCH_VIDEO:
-        if run(['which', 'youtube-dl']).returncode or run(['youtube-dl', '--version'], stdout=DEVNULL).returncode:
+        if run(['which', 'youtube-dl'], stdout=DEVNULL).returncode or run(['youtube-dl', '--version'], stdout=DEVNULL).returncode:
            print('{red}[X] Missing dependency: youtube-dl{reset}'.format(**ANSI))
            print('    Run ./setup.sh, then confirm it was installed with: {} --version'.format('youtube-dl'))
            print('    See https://github.com/pirate/bookmark-archiver for help.')
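All three checks above gain stdout=DEVNULL on the `which` call, so the resolved binary path is no longer echoed into the archiver's console output; only the return code matters. A minimal sketch of the pattern (the helper name is an assumption for illustration, not something from util.py):

from subprocess import run, DEVNULL

def binary_available(name):
    """True if `which <name>` exits 0, without printing the resolved path."""
    return run(['which', name], stdout=DEVNULL).returncode == 0

if not binary_available('wget'):
    print('[X] Missing dependency: wget')
    raise SystemExit(1)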