1
0
Fork 0
Mirror of upstream repository, synced 2024-05-19 19:52:41 +12:00

Modify code to accept pluggable logging handlers

This commit is contained in:
Serene-Arc 2023-01-08 13:33:35 +10:00
parent 9b23082a78
commit 8f17bcf43b
6 changed files with 50 additions and 34 deletions

View file

@@ -111,9 +111,10 @@ def cli_download(context: click.Context, **_):
"""Used to download content posted to Reddit.""" """Used to download content posted to Reddit."""
config = Configuration() config = Configuration()
config.process_click_arguments(context) config.process_click_arguments(context)
setup_logging(config.verbose) silence_module_loggers()
stream = make_console_logging_handler(config.verbose)
try: try:
reddit_downloader = RedditDownloader(config) reddit_downloader = RedditDownloader(config, [stream])
reddit_downloader.download() reddit_downloader.download()
except Exception: except Exception:
logger.exception("Downloader exited unexpectedly") logger.exception("Downloader exited unexpectedly")
@@ -131,9 +132,10 @@ def cli_archive(context: click.Context, **_):
"""Used to archive post data from Reddit.""" """Used to archive post data from Reddit."""
config = Configuration() config = Configuration()
config.process_click_arguments(context) config.process_click_arguments(context)
setup_logging(config.verbose) silence_module_loggers()
stream = make_console_logging_handler(config.verbose)
try: try:
reddit_archiver = Archiver(config) reddit_archiver = Archiver(config, [stream])
reddit_archiver.download() reddit_archiver.download()
except Exception: except Exception:
logger.exception("Archiver exited unexpectedly") logger.exception("Archiver exited unexpectedly")
@@ -152,9 +154,10 @@ def cli_clone(context: click.Context, **_):
"""Combines archive and download commands.""" """Combines archive and download commands."""
config = Configuration() config = Configuration()
config.process_click_arguments(context) config.process_click_arguments(context)
setup_logging(config.verbose) silence_module_loggers()
stream = make_console_logging_handler(config.verbose)
try: try:
reddit_scraper = RedditCloner(config) reddit_scraper = RedditCloner(config, [stream])
reddit_scraper.download() reddit_scraper.download()
except Exception: except Exception:
logger.exception("Scraper exited unexpectedly") logger.exception("Scraper exited unexpectedly")
@@ -187,7 +190,7 @@ def cli_completion(shell: str, uninstall: bool):
Completion(shell).install() Completion(shell).install()
def setup_logging(verbosity: int): def make_console_logging_handler(verbosity: int) -> logging.StreamHandler:
class StreamExceptionFilter(logging.Filter): class StreamExceptionFilter(logging.Filter):
def filter(self, record: logging.LogRecord) -> bool: def filter(self, record: logging.LogRecord) -> bool:
result = not (record.levelno == logging.ERROR and record.exc_info) result = not (record.levelno == logging.ERROR and record.exc_info)
@@ -200,13 +203,16 @@ def setup_logging(verbosity: int):
formatter = logging.Formatter("[%(asctime)s - %(name)s - %(levelname)s] - %(message)s") formatter = logging.Formatter("[%(asctime)s - %(name)s - %(levelname)s] - %(message)s")
stream.setFormatter(formatter) stream.setFormatter(formatter)
logger.addHandler(stream)
if verbosity <= 0: if verbosity <= 0:
stream.setLevel(logging.INFO) stream.setLevel(logging.INFO)
elif verbosity == 1: elif verbosity == 1:
stream.setLevel(logging.DEBUG) stream.setLevel(logging.DEBUG)
else: else:
stream.setLevel(9) stream.setLevel(9)
return stream
def silence_module_loggers():
logging.getLogger("praw").setLevel(logging.CRITICAL) logging.getLogger("praw").setLevel(logging.CRITICAL)
logging.getLogger("prawcore").setLevel(logging.CRITICAL) logging.getLogger("prawcore").setLevel(logging.CRITICAL)
logging.getLogger("urllib3").setLevel(logging.CRITICAL) logging.getLogger("urllib3").setLevel(logging.CRITICAL)

View file

@@ -5,7 +5,7 @@ import json
import logging import logging
import re import re
from time import sleep from time import sleep
from typing import Iterator, Union from typing import Iterable, Iterator, Union
import dict2xml import dict2xml
import praw.models import praw.models
@@ -24,8 +24,8 @@ logger = logging.getLogger(__name__)
class Archiver(RedditConnector): class Archiver(RedditConnector):
def __init__(self, args: Configuration): def __init__(self, args: Configuration, logging_handlers: Iterable[logging.Handler] = ()):
super(Archiver, self).__init__(args) super(Archiver, self).__init__(args, logging_handlers)
def download(self): def download(self):
for generator in self.reddit_lists: for generator in self.reddit_lists:

View file

@@ -3,6 +3,7 @@
import logging import logging
from time import sleep from time import sleep
from typing import Iterable
import prawcore import prawcore
@@ -14,8 +15,8 @@ logger = logging.getLogger(__name__)
class RedditCloner(RedditDownloader, Archiver): class RedditCloner(RedditDownloader, Archiver):
def __init__(self, args: Configuration): def __init__(self, args: Configuration, logging_handlers: Iterable[logging.Handler] = ()):
super(RedditCloner, self).__init__(args) super(RedditCloner, self).__init__(args, logging_handlers)
def download(self): def download(self):
for generator in self.reddit_lists: for generator in self.reddit_lists:

View file

@@ -14,7 +14,7 @@ from datetime import datetime
from enum import Enum, auto from enum import Enum, auto
from pathlib import Path from pathlib import Path
from time import sleep from time import sleep
from typing import Callable, Iterator from typing import Callable, Iterable, Iterator
import appdirs import appdirs
import praw import praw
@@ -51,20 +51,20 @@ class RedditTypes:
class RedditConnector(metaclass=ABCMeta): class RedditConnector(metaclass=ABCMeta):
def __init__(self, args: Configuration): def __init__(self, args: Configuration, logging_handlers: Iterable[logging.Handler] = ()):
self.args = args self.args = args
self.config_directories = appdirs.AppDirs("bdfr", "BDFR") self.config_directories = appdirs.AppDirs("bdfr", "BDFR")
self.determine_directories()
self.load_config()
self.read_config()
file_log = self.create_file_logger()
self._apply_logging_handlers(itertools.chain(logging_handlers, [file_log]))
self.run_time = datetime.now().isoformat() self.run_time = datetime.now().isoformat()
self._setup_internal_objects() self._setup_internal_objects()
self.reddit_lists = self.retrieve_reddit_lists() self.reddit_lists = self.retrieve_reddit_lists()
def _setup_internal_objects(self): def _setup_internal_objects(self):
self.determine_directories()
self.load_config()
self.create_file_logger()
self.read_config()
self.parse_disabled_modules() self.parse_disabled_modules()
@@ -94,6 +94,12 @@ class RedditConnector(metaclass=ABCMeta):
self.args.skip_subreddit = self.split_args_input(self.args.skip_subreddit) self.args.skip_subreddit = self.split_args_input(self.args.skip_subreddit)
self.args.skip_subreddit = {sub.lower() for sub in self.args.skip_subreddit} self.args.skip_subreddit = {sub.lower() for sub in self.args.skip_subreddit}
@staticmethod
def _apply_logging_handlers(handlers: Iterable[logging.Handler]):
main_logger = logging.getLogger()
for handler in handlers:
main_logger.addHandler(handler)
def read_config(self): def read_config(self):
"""Read any cfg values that need to be processed""" """Read any cfg values that need to be processed"""
if self.args.max_wait_time is None: if self.args.max_wait_time is None:
@@ -203,8 +209,7 @@ class RedditConnector(metaclass=ABCMeta):
raise errors.BulkDownloaderException("Could not find a configuration file to load") raise errors.BulkDownloaderException("Could not find a configuration file to load")
self.cfg_parser.read(self.config_location) self.cfg_parser.read(self.config_location)
def create_file_logger(self): def create_file_logger(self) -> logging.handlers.RotatingFileHandler:
main_logger = logging.getLogger()
if self.args.log is None: if self.args.log is None:
log_path = Path(self.config_directory, "log_output.txt") log_path = Path(self.config_directory, "log_output.txt")
else: else:
@@ -229,8 +234,7 @@ class RedditConnector(metaclass=ABCMeta):
formatter = logging.Formatter("[%(asctime)s - %(name)s - %(levelname)s] - %(message)s") formatter = logging.Formatter("[%(asctime)s - %(name)s - %(levelname)s] - %(message)s")
file_handler.setFormatter(formatter) file_handler.setFormatter(formatter)
file_handler.setLevel(0) file_handler.setLevel(0)
return file_handler
main_logger.addHandler(file_handler)
@staticmethod @staticmethod
def sanitise_subreddit_name(subreddit: str) -> str: def sanitise_subreddit_name(subreddit: str) -> str:

View file

@@ -9,6 +9,7 @@ from datetime import datetime
from multiprocessing import Pool from multiprocessing import Pool
from pathlib import Path from pathlib import Path
from time import sleep from time import sleep
from typing import Iterable
import praw import praw
import praw.exceptions import praw.exceptions
@@ -36,8 +37,8 @@ def _calc_hash(existing_file: Path):
class RedditDownloader(RedditConnector): class RedditDownloader(RedditConnector):
def __init__(self, args: Configuration): def __init__(self, args: Configuration, logging_handlers: Iterable[logging.Handler] = ()):
super(RedditDownloader, self).__init__(args) super(RedditDownloader, self).__init__(args, logging_handlers)
if self.args.search_existing: if self.args.search_existing:
self.master_hash_list = self.scan_existing_files(self.download_directory) self.master_hash_list = self.scan_existing_files(self.download_directory)

View file

@@ -1,6 +1,6 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import logging
import os import os
import re import re
from pathlib import Path from pathlib import Path
@@ -9,12 +9,16 @@ from unittest.mock import MagicMock, patch
import praw.models import praw.models
import pytest import pytest
from bdfr.__main__ import setup_logging from bdfr.__main__ import make_console_logging_handler
from bdfr.configuration import Configuration from bdfr.configuration import Configuration
from bdfr.connector import RedditConnector from bdfr.connector import RedditConnector
from bdfr.downloader import RedditDownloader from bdfr.downloader import RedditDownloader
def add_console_handler():
logging.getLogger().addHandler(make_console_logging_handler(3))
@pytest.fixture() @pytest.fixture()
def args() -> Configuration: def args() -> Configuration:
args = Configuration() args = Configuration()
@@ -134,7 +138,7 @@ def test_download_submission_hash_exists(
tmp_path: Path, tmp_path: Path,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
): ):
setup_logging(3) add_console_handler()
downloader_mock.reddit_instance = reddit_instance downloader_mock.reddit_instance = reddit_instance
downloader_mock.download_filter.check_url.return_value = True downloader_mock.download_filter.check_url.return_value = True
downloader_mock.args.folder_scheme = "" downloader_mock.args.folder_scheme = ""
@@ -155,7 +159,7 @@ def test_download_submission_file_exists(
def test_download_submission_file_exists( def test_download_submission_file_exists(
downloader_mock: MagicMock, reddit_instance: praw.Reddit, tmp_path: Path, capsys: pytest.CaptureFixture downloader_mock: MagicMock, reddit_instance: praw.Reddit, tmp_path: Path, capsys: pytest.CaptureFixture
): ):
setup_logging(3) add_console_handler()
downloader_mock.reddit_instance = reddit_instance downloader_mock.reddit_instance = reddit_instance
downloader_mock.download_filter.check_url.return_value = True downloader_mock.download_filter.check_url.return_value = True
downloader_mock.args.folder_scheme = "" downloader_mock.args.folder_scheme = ""
@@ -202,7 +206,7 @@ def test_download_submission_min_score_above(
tmp_path: Path, tmp_path: Path,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
): ):
setup_logging(3) add_console_handler()
downloader_mock.reddit_instance = reddit_instance downloader_mock.reddit_instance = reddit_instance
downloader_mock.download_filter.check_url.return_value = True downloader_mock.download_filter.check_url.return_value = True
downloader_mock.args.folder_scheme = "" downloader_mock.args.folder_scheme = ""
@@ -226,7 +230,7 @@ def test_download_submission_min_score_below(
tmp_path: Path, tmp_path: Path,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
): ):
setup_logging(3) add_console_handler()
downloader_mock.reddit_instance = reddit_instance downloader_mock.reddit_instance = reddit_instance
downloader_mock.download_filter.check_url.return_value = True downloader_mock.download_filter.check_url.return_value = True
downloader_mock.args.folder_scheme = "" downloader_mock.args.folder_scheme = ""
@@ -250,7 +254,7 @@ def test_download_submission_max_score_below(
tmp_path: Path, tmp_path: Path,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
): ):
setup_logging(3) add_console_handler()
downloader_mock.reddit_instance = reddit_instance downloader_mock.reddit_instance = reddit_instance
downloader_mock.download_filter.check_url.return_value = True downloader_mock.download_filter.check_url.return_value = True
downloader_mock.args.folder_scheme = "" downloader_mock.args.folder_scheme = ""
@@ -274,7 +278,7 @@ def test_download_submission_max_score_above(
tmp_path: Path, tmp_path: Path,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
): ):
setup_logging(3) add_console_handler()
downloader_mock.reddit_instance = reddit_instance downloader_mock.reddit_instance = reddit_instance
downloader_mock.download_filter.check_url.return_value = True downloader_mock.download_filter.check_url.return_value = True
downloader_mock.args.folder_scheme = "" downloader_mock.args.folder_scheme = ""