
comment out unimplemented args

This commit is contained in:
Nick Sweeting 2019-04-03 03:52:56 -04:00
parent fd80275856
commit eb2b6978c3
2 changed files with 16 additions and 17 deletions

View file

@@ -4,7 +4,6 @@ __package__ = 'archivebox.cli'
__command__ = 'archivebox add'
__description__ = 'Add a new URL or list of URLs to your archive'
import os
import sys
import argparse
@@ -34,17 +33,17 @@ def main(args=None):
         action='store_true',
         help="Don't attempt to retry previously skipped/failed links when updating",
     )
-    parser.add_argument(
-        '--mirror', #'-m',
-        action='store_true',
-        help='Archive an entire site (finding all linked pages below it on the same domain)',
-    )
-    parser.add_argument(
-        '--crawler', #'-r',
-        choices=('depth_first', 'breadth_first'),
-        help='Controls which crawler to use in order to find outlinks in a given page',
-        default=None,
-    )
+    # parser.add_argument(
+    #     '--mirror', #'-m',
+    #     action='store_true',
+    #     help='Archive an entire site (finding all linked pages below it on the same domain)',
+    # )
+    # parser.add_argument(
+    #     '--crawler', #'-r',
+    #     choices=('depth_first', 'breadth_first'),
+    #     help='Controls which crawler to use in order to find outlinks in a given page',
+    #     default=None,
+    # )
     parser.add_argument(
         'url',
         nargs='?',
@@ -55,7 +54,7 @@ def main(args=None):
     command = parser.parse_args(args)

     ### Handle ingesting urls piped in through stdin
-    # (.e.g if user does cat example_urls.txt | ./archive)
+    # (.e.g if user does cat example_urls.txt | archivebox add)
     import_path = None
     if not sys.stdin.isatty():
         stdin_raw_text = sys.stdin.read()
@@ -73,7 +72,6 @@ def main(args=None):
elif command.url:
import_path = handle_file_import(command.url)
update_archive_data(
import_path=import_path,
resume=None,
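
For readers unfamiliar with the patterns in the hunks above, here is a minimal, self-contained sketch of the same argparse idioms (a store_true flag, a choices-restricted option, an optional positional argument) together with the sys.stdin.isatty() check used to detect piped input. The option names, messages, and control flow are invented for illustration only; this is not ArchiveBox's actual implementation.

import sys
import argparse

def main(args=None):
    parser = argparse.ArgumentParser(
        prog='archivebox add',
        description='Add a new URL or list of URLs to your archive',
    )
    # Boolean switch, same shape as the commented-out --mirror flag above.
    parser.add_argument(
        '--example-flag',
        action='store_true',
        help='Present means True, absent means False',
    )
    # Enumerated option, same shape as the commented-out --crawler flag above.
    parser.add_argument(
        '--example-mode',
        choices=('depth_first', 'breadth_first'),
        default=None,
        help='Value must be one of the listed choices',
    )
    # Optional positional argument, so input can also arrive via a pipe instead.
    parser.add_argument('url', nargs='?', default=None, help='URL or path to import')
    command = parser.parse_args(args)

    # Detect piped input, e.g.:  cat example_urls.txt | archivebox add
    stdin_raw_text = None
    if not sys.stdin.isatty():
        stdin_raw_text = sys.stdin.read().strip()

    if stdin_raw_text and command.url:
        print('Pass URLs either via stdin or as an argument, not both.', file=sys.stderr)
        raise SystemExit(2)

    print('flag:', command.example_flag, 'mode:', command.example_mode)
    print('input:', command.url or stdin_raw_text)

if __name__ == '__main__':
    main()

Running, for example, echo 'https://example.com/some/page' | python sketch.py --example-mode depth_first exercises the piped-input branch, while passing the URL as a positional argument exercises the other.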

View file

@@ -39,11 +39,12 @@ Example Use:
mkdir my-archive; cd my-archive/
archivebox init
echo 'https://example.com/some/page' | archivebox add
archivebox add https://example.com/some/other/page
archivebox add https://example.com/some/page
archivebox add --depth=1 ~/Downloads/bookmarks_export.html
archivebox add --depth=1 https://example.com/feed.rss
archivebox subscribe https://example.com/some/feed.rss
archivebox update --resume=15109948213.123
archivebox list --sort=timestamp --csv=timestamp,url,is_archived
Documentation:
https://github.com/pirate/ArchiveBox/wiki