
comment out unimplemented args

Nick Sweeting 2019-04-03 03:52:56 -04:00
parent fd80275856
commit eb2b6978c3
2 changed files with 16 additions and 17 deletions


@@ -4,7 +4,6 @@ __package__ = 'archivebox.cli'
 __command__ = 'archivebox add'
 __description__ = 'Add a new URL or list of URLs to your archive'
 
-import os
 import sys
 import argparse
 
@@ -34,17 +33,17 @@ def main(args=None):
         action='store_true',
         help="Don't attempt to retry previously skipped/failed links when updating",
     )
-    parser.add_argument(
-        '--mirror', #'-m',
-        action='store_true',
-        help='Archive an entire site (finding all linked pages below it on the same domain)',
-    )
-    parser.add_argument(
-        '--crawler', #'-r',
-        choices=('depth_first', 'breadth_first'),
-        help='Controls which crawler to use in order to find outlinks in a given page',
-        default=None,
-    )
+    # parser.add_argument(
+    #     '--mirror', #'-m',
+    #     action='store_true',
+    #     help='Archive an entire site (finding all linked pages below it on the same domain)',
+    # )
+    # parser.add_argument(
+    #     '--crawler', #'-r',
+    #     choices=('depth_first', 'breadth_first'),
+    #     help='Controls which crawler to use in order to find outlinks in a given page',
+    #     default=None,
+    # )
     parser.add_argument(
         'url',
         nargs='?',
@@ -55,7 +54,7 @@ def main(args=None):
     command = parser.parse_args(args)
 
     ### Handle ingesting urls piped in through stdin
-    # (.e.g if user does cat example_urls.txt | ./archive)
+    # (.e.g if user does cat example_urls.txt | archivebox add)
    import_path = None
     if not sys.stdin.isatty():
         stdin_raw_text = sys.stdin.read()
@@ -73,7 +72,6 @@ def main(args=None):
     elif command.url:
         import_path = handle_file_import(command.url)
 
-
     update_archive_data(
         import_path=import_path,
         resume=None,
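
For orientation, the surviving code in this file follows a common argparse-plus-stdin pattern: the command accepts an optional positional url, and falls back to reading piped-in URLs whenever sys.stdin.isatty() is false. The sketch below is a minimal, self-contained illustration of that pattern only, not ArchiveBox's implementation; the real command hands the input on to handle_file_import() and update_archive_data() (partially visible in the hunks above), which this stand-in replaces with a simple print loop.

import sys
import argparse

def main(args=None):
    # Minimal sketch of the optional-positional-URL / piped-stdin pattern.
    # NOT ArchiveBox's code: handle_file_import()/update_archive_data() are
    # replaced here by a print loop so the example runs on its own.
    parser = argparse.ArgumentParser(prog='archivebox add')
    parser.add_argument('url', nargs='?', default=None,
                        help='URL or file to import (optional if URLs are piped in)')
    command = parser.parse_args(args)

    urls = []
    if not sys.stdin.isatty():
        # stdin is a pipe or file, e.g.: cat example_urls.txt | archivebox add
        stdin_raw_text = sys.stdin.read()
        if stdin_raw_text and command.url:
            print('[X] Pass URLs either as an argument or via stdin, not both.')
            raise SystemExit(2)
        urls.extend(line.strip() for line in stdin_raw_text.splitlines() if line.strip())
    elif command.url:
        urls.append(command.url)

    for url in urls:
        print('would archive:', url)

if __name__ == '__main__':
    main()

With this sketch saved as sketch.py, echo 'https://example.com' | python3 sketch.py prints the URL it would archive via stdin, and python3 sketch.py https://example.com does the same via the positional argument.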


@@ -39,11 +39,12 @@ Example Use:
     mkdir my-archive; cd my-archive/
     archivebox init
 
-    echo 'https://example.com/some/page' | archivebox add
+    archivebox add https://example.com/some/page
+    archivebox add https://example.com/some/other/page
     archivebox add --depth=1 ~/Downloads/bookmarks_export.html
-    archivebox subscribe https://example.com/some/feed.rss
+    archivebox add --depth=1 https://example.com/feed.rss
     archivebox update --resume=15109948213.123
-
+    archivebox list --sort=timestamp --csv=timestamp,url,is_archived
 
 Documentation:
     https://github.com/pirate/ArchiveBox/wiki