Switch from argparse to click
This commit is contained in:
parent
dd522c18d4
commit
50531c7b3e
|
@ -1,104 +1,54 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import argparse
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
from bulkredditdownloader.configuration import Configuration
|
from bulkredditdownloader.configuration import Configuration
|
||||||
from bulkredditdownloader.downloader import RedditDownloader
|
from bulkredditdownloader.downloader import RedditDownloader
|
||||||
from bulkredditdownloader.exceptions import BulkDownloaderException
|
from bulkredditdownloader.exceptions import BulkDownloaderException
|
||||||
|
|
||||||
# Module-level handle on the ROOT logger (no name argument): handlers and
# verbosity levels are attached later by _setup_logging().
logger = logging.getLogger()
|
||||||
parser = argparse.ArgumentParser(allow_abbrev=False,
|
|
||||||
description='This program downloads media from reddit posts')
|
|
||||||
|
|
||||||
|
|
||||||
@click.group()
def cli():
    # The docstring doubles as the group's --help text in click; the group
    # itself carries no options — subcommands (e.g. 'download') do the work.
    """Download media from reddit posts."""
|
|
||||||
@cli.command('download')
@click.argument('directory', type=str)
@click.option('-v', '--verbose', default=None, count=True)
@click.option('-l', '--link', multiple=True, default=None, type=str)
@click.option('-s', '--subreddit', multiple=True, default=None, type=str)
@click.option('-m', '--multireddit', multiple=True, default=None, type=str)
@click.option('-L', '--limit', default=None, type=int)
@click.option('--authenticate', is_flag=True, default=None)
@click.option('--submitted', is_flag=True, default=None)
@click.option('--upvoted', is_flag=True, default=None)
@click.option('--saved', is_flag=True, default=None)
@click.option('--search', default=None, type=str)
@click.option('-u', '--user', type=str, default=None)
@click.option('-t', '--time', type=click.Choice(('all', 'hour', 'day', 'week', 'month', 'year')), default=None)
@click.option('-S', '--sort', type=click.Choice(('hot', 'top', 'new',
                                                 'controversial', 'rising', 'relevance')), default=None)
@click.option('--skip', default=None, multiple=True)
@click.option('--skip-domain', default=None, multiple=True)
@click.option('--set-file-scheme', default=None, type=str)
@click.option('--set-folder-scheme', default=None, type=str)
@click.option('--no-dupes', is_flag=True, default=None)
@click.pass_context
def cli_download(context: click.Context, **_):
    """Download reddit posts from the requested sources into DIRECTORY."""
    # Every option deliberately defaults to None (not click's usual falsy
    # defaults) so Configuration can tell "not given on the CLI" apart from
    # an explicit value when merging with other settings — presumably a
    # config file; Configuration's merge logic is defined elsewhere.
    config = Configuration()
    config.process_click_arguments(context)
    _setup_logging(config.verbose)
    try:
        reddit_downloader = RedditDownloader(config)
        reddit_downloader.download()
    except BulkDownloaderException as e:
        # Known, user-facing failures are reported without a traceback.
        # (Message spelling corrected: 'occured' -> 'occurred'.)
        logger.critical(f'An error occurred {e}')
||||||
def _setup_logging(verbosity: int):
|
def _setup_logging(verbosity: int):
|
||||||
|
@ -111,23 +61,10 @@ def _setup_logging(verbosity: int):
|
||||||
stream.setLevel(logging.INFO)
|
stream.setLevel(logging.INFO)
|
||||||
else:
|
else:
|
||||||
stream.setLevel(logging.DEBUG)
|
stream.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
logging.getLogger('praw').setLevel(logging.CRITICAL)
|
logging.getLogger('praw').setLevel(logging.CRITICAL)
|
||||||
logging.getLogger('prawcore').setLevel(logging.CRITICAL)
|
logging.getLogger('prawcore').setLevel(logging.CRITICAL)
|
||||||
logging.getLogger('urllib3').setLevel(logging.CRITICAL)
|
logging.getLogger('urllib3').setLevel(logging.CRITICAL)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Entry point: the click group dispatches to cli_download, which now
    # owns the argument parsing, logging setup, and error handling that the
    # removed argparse-era main()/parse_args wiring used to do.
    cli()
|
|
||||||
|
|
Loading…
Reference in a new issue