1
0
Fork 0
mirror of synced 2024-10-01 09:41:03 +13:00

Merge pull request #780 from OMEGARAZER/Quote-cleanup

This commit is contained in:
Serene 2023-02-19 10:39:15 +10:00 committed by GitHub
commit 911410608a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 47 additions and 49 deletions

View file

@ -80,11 +80,11 @@ bdfr download ./path/to/output --user reddituser --submitted -L 100
```
```bash
bdfr download ./path/to/output --user me --saved --authenticate -L 25 --file-scheme '{POSTID}'
bdfr download ./path/to/output --user me --saved --authenticate -L 25 --file-scheme "{POSTID}"
```
```bash
bdfr download ./path/to/output --subreddit 'Python, all, mindustry' -L 10 --make-hard-links
bdfr download ./path/to/output --subreddit "Python, all, mindustry" -L 10 --make-hard-links
```
```bash
@ -92,7 +92,7 @@ bdfr archive ./path/to/output --user reddituser --submitted --all-comments --com
```
```bash
bdfr archive ./path/to/output --subreddit all --format yaml -L 500 --folder-scheme ''
bdfr archive ./path/to/output --subreddit all --format yaml -L 500 --folder-scheme ""
```
Alternatively, you can pass options through a YAML file.
@ -191,13 +191,13 @@ The following options are common between both the `archive` and `download` comma
- This is the name of a multireddit to add as a source
- Can be specified multiple times
- This can be done by using `-m` multiple times
- Multireddits can also be used to provide CSV multireddits e.g. `-m 'chess, favourites'`
- Multireddits can also be used to provide CSV multireddits e.g. `-m "chess, favourites"`
- The specified multireddits must all belong to the user specified with the `--user` option
- `-s, --subreddit`
- This adds a subreddit as a source
  - Can be used multiple times
- This can be done by using `-s` multiple times
- Subreddits can also be used to provide CSV subreddits e.g. `-m 'all, python, mindustry'`
  - Subreddits can also be used to provide CSV subreddits e.g. `-s "all, python, mindustry"`
- `-t, --time`
- This is the time filter that will be applied to all applicable sources
- This option does not apply to upvoted or saved posts when scraping from these sources

View file

@ -186,7 +186,7 @@ def cli_completion(shell: str, uninstall: bool):
Completion(shell).uninstall()
return
if shell not in ("all", "bash", "fish", "zsh"):
print(f"{shell} is not a valid option.")
print(f"{shell!r} is not a valid option.")
print("Options: all, bash, fish, zsh")
return
if click.confirm(f"Would you like to install {shell} completions for BDFR"):

View file

@ -94,7 +94,7 @@ class Archiver(RedditConnector):
elif self.args.format == "yaml":
self._write_entry_yaml(archive_entry)
else:
raise ArchiverError(f"Unknown format {self.args.format} given")
raise ArchiverError(f"Unknown format {self.args.format!r} given")
logger.info(f"Record for entry item {praw_item.id} written to disk")
def _write_entry_json(self, entry: BaseArchiveEntry):

View file

@ -63,7 +63,7 @@ class Configuration(Namespace):
self.parse_yaml_options(context.params["opts"])
for arg_key in context.params.keys():
if not hasattr(self, arg_key):
logger.warning(f"Ignoring an unknown CLI argument: {arg_key}")
logger.warning(f"Ignoring an unknown CLI argument: {arg_key!r}")
continue
val = context.params[arg_key]
if val is None or val == ():
@ -84,6 +84,6 @@ class Configuration(Namespace):
return
for arg_key, val in opts.items():
if not hasattr(self, arg_key):
logger.warning(f"Ignoring an unknown YAML argument: {arg_key}")
logger.warning(f"Ignoring an unknown YAML argument: {arg_key!r}")
continue
setattr(self, arg_key, val)

View file

@ -115,7 +115,7 @@ class RedditConnector(metaclass=ABCMeta):
self.args.filename_restriction_scheme = self.cfg_parser.get(
"DEFAULT", "filename_restriction_scheme", fallback=None
)
logger.debug(f"Setting filename restriction scheme to '{self.args.filename_restriction_scheme}'")
logger.debug(f"Setting filename restriction scheme to {self.args.filename_restriction_scheme!r}")
# Update config on disk
with Path(self.config_location).open(mode="w") as file:
self.cfg_parser.write(file)
@ -125,7 +125,7 @@ class RedditConnector(metaclass=ABCMeta):
disabled_modules = self.split_args_input(disabled_modules)
disabled_modules = {name.strip().lower() for name in disabled_modules}
self.args.disable_module = disabled_modules
logger.debug(f'Disabling the following modules: {", ".join(self.args.disable_module)}')
logger.debug(f"Disabling the following modules: {', '.join(self.args.disable_module)}")
def create_reddit_instance(self):
if self.args.authenticate:
@ -239,7 +239,7 @@ class RedditConnector(metaclass=ABCMeta):
pattern = re.compile(r"^(?:https://www\.reddit\.com/)?(?:r/)?(.*?)/?$")
match = re.match(pattern, subreddit)
if not match:
raise errors.BulkDownloaderException(f"Could not find subreddit name in string {subreddit}")
raise errors.BulkDownloaderException(f"Could not find subreddit name in string {subreddit!r}")
return match.group(1)
@staticmethod
@ -285,7 +285,7 @@ class RedditConnector(metaclass=ABCMeta):
)
)
logger.debug(
f'Added submissions from subreddit {reddit} with the search term "{self.args.search}"'
f"Added submissions from subreddit {reddit} with the search term {self.args.search!r}"
)
else:
out.append(self.create_filtered_listing_generator(reddit))
@ -301,7 +301,7 @@ class RedditConnector(metaclass=ABCMeta):
logger.log(9, f"Resolved user to {resolved_name}")
return resolved_name
else:
logger.warning('To use "me" as a user, an authenticated Reddit instance must be used')
logger.warning("To use 'me' as a user, an authenticated Reddit instance must be used")
else:
return in_name

View file

@ -35,7 +35,7 @@ class DownloadFilter:
combined_extensions = "|".join(self.excluded_extensions)
pattern = re.compile(rf".*({combined_extensions})$")
if re.match(pattern, resource_extension):
logger.log(9, f'Url "{resource_extension}" matched with "{pattern}"')
logger.log(9, f"Url {resource_extension!r} matched with {pattern!r}")
return False
else:
return True
@ -46,7 +46,7 @@ class DownloadFilter:
combined_domains = "|".join(self.excluded_domains)
pattern = re.compile(rf"https?://.*({combined_domains}).*")
if re.match(pattern, url):
logger.log(9, f'Url "{url}" matched with "{pattern}"')
logger.log(9, f"Url {url!r} matched with {pattern!r}")
return False
else:
return True

View file

@ -66,7 +66,7 @@ class RedditDownloader(RedditConnector):
):
logger.debug(
f"Submission {submission.id} in {submission.subreddit.display_name} skipped"
f' due to {submission.author.name if submission.author else "DELETED"} being an ignored user'
f" due to {submission.author.name if submission.author else 'DELETED'} being an ignored user"
)
return
elif self.args.min_score and submission.score < self.args.min_score:

View file

@ -37,7 +37,7 @@ class FileNameFormatter:
restriction_scheme: Optional[str] = None,
):
if not self.validate_string(file_format_string):
raise BulkDownloaderException(f'"{file_format_string}" is not a valid format string')
raise BulkDownloaderException(f"{file_format_string!r} is not a valid format string")
self.file_format_string = file_format_string
self.directory_format_string: list[str] = directory_format_string.split("/")
self.time_format_string = time_format_string

View file

@ -36,7 +36,7 @@ class OAuth2Authenticator:
known_scopes.append("*")
for scope in wanted_scopes:
if scope not in known_scopes:
raise BulkDownloaderException(f"Scope {scope} is not known to reddit")
raise BulkDownloaderException(f"Scope {scope!r} is not known to reddit")
@staticmethod
def split_scopes(scopes: str) -> set[str]:
@ -62,10 +62,10 @@ class OAuth2Authenticator:
if state != params["state"]:
self.send_message(client)
raise RedditAuthenticationError(f'State mismatch in OAuth2. Expected: {state} Received: {params["state"]}')
raise RedditAuthenticationError(f"State mismatch in OAuth2. Expected: {state} Received: {params['state']}")
elif "error" in params:
self.send_message(client)
raise RedditAuthenticationError(f'Error in OAuth2: {params["error"]}')
raise RedditAuthenticationError(f"Error in OAuth2: {params['error']}")
self.send_message(client, "<script>alert('You can go back to terminal window now.')</script>")
refresh_token = reddit.auth.authorize(params["code"])

View file

@ -82,7 +82,7 @@ class DownloadFactory:
"php3",
"xhtml",
)
if re.match(rf'(?i).*/.*\.({"|".join(web_extensions)})$', url):
if re.match(rf"(?i).*/.*\.({'|'.join(web_extensions)})$", url):
return True
else:
return False

View file

@ -71,10 +71,8 @@ class Redgifs(BaseDownloader):
else:
out.add(response_json["gif"]["urls"]["sd"])
elif response_json["gif"]["type"] == 2: # type 2 is an image
if response_json["gif"]["gallery"]:
content = Redgifs.retrieve_url(
f'https://api.redgifs.com/v2/gallery/{response_json["gif"]["gallery"]}'
)
if gallery := response_json["gif"]["gallery"]:
content = Redgifs.retrieve_url(f"https://api.redgifs.com/v2/gallery/{gallery}")
response_json = json.loads(content.text)
out = {p["urls"]["hd"] for p in response_json["gifs"]}
else:

View file

@ -36,7 +36,7 @@ dependencies = [
dynamic = ["version"]
[tool.setuptools]
dynamic = {"version" = {attr = 'bdfr.__version__'}}
dynamic = {"version" = {attr = "bdfr.__version__"}}
packages = ["bdfr", "bdfr.archive_entry", "bdfr.site_downloaders", "bdfr.site_downloaders.fallback_downloaders",]
data-files = {"config" = ["bdfr/default_config.cfg",]}

View file

@ -3,14 +3,14 @@
if [ -e "$1" ]; then
file="$1"
else
echo 'CANNOT FIND LOG FILE'
echo "CANNOT FIND LOG FILE"
exit 1
fi
{
grep 'Could not download submission' "$file" | awk '{ print $12 }' | rev | cut -c 2- | rev ;
grep 'Failed to download resource' "$file" | awk '{ print $15 }' ;
grep 'failed to download submission' "$file" | awk '{ print $14 }' | rev | cut -c 2- | rev ;
grep 'Failed to write file' "$file" | awk '{ print $14 }' ;
grep 'skipped due to disabled module' "$file" | awk '{ print $9 }' ;
grep "Could not download submission" "$file" | awk '{ print $12 }' | rev | cut -c 2- | rev ;
grep "Failed to download resource" "$file" | awk '{ print $15 }' ;
grep "failed to download submission" "$file" | awk '{ print $14 }' | rev | cut -c 2- | rev ;
grep "Failed to write file" "$file" | awk '{ print $14 }' ;
grep "skipped due to disabled module" "$file" | awk '{ print $9 }' ;
}

View file

@ -3,15 +3,15 @@
if [ -e "$1" ]; then
file="$1"
else
echo 'CANNOT FIND LOG FILE'
echo "CANNOT FIND LOG FILE"
exit 1
fi
{
grep 'Downloaded submission' "$file" | awk '{ print $(NF-2) }' ;
grep 'Resource hash' "$file" | awk '{ print $(NF-2) }' ;
grep 'Download filter' "$file" | awk '{ print $(NF-3) }' ;
grep 'already exists, continuing' "$file" | awk '{ print $(NF-3) }' ;
grep 'Hard link made' "$file" | awk '{ print $(NF) }' ;
grep 'filtered due to score' "$file" | awk '{ print $9 }'
grep "Downloaded submission" "$file" | awk '{ print $(NF-2) }' ;
grep "Resource hash" "$file" | awk '{ print $(NF-2) }' ;
grep "Download filter" "$file" | awk '{ print $(NF-3) }' ;
grep "already exists, continuing" "$file" | awk '{ print $(NF-3) }' ;
grep "Hard link made" "$file" | awk '{ print $(NF) }' ;
grep "filtered due to score" "$file" | awk '{ print $9 }'
}

View file

@ -3,14 +3,14 @@
if [ -e "$1" ]; then
file="$1"
else
echo 'CANNOT FIND LOG FILE'
echo "CANNOT FIND LOG FILE"
exit 1
fi
echo "Downloaded submissions: $( grep -c 'Downloaded submission' "$file" )"
echo "Failed downloads: $( grep -c 'failed to download submission' "$file" )"
echo "Files already downloaded: $( grep -c 'already exists, continuing' "$file" )"
echo "Hard linked submissions: $( grep -c 'Hard link made' "$file" )"
echo "Excluded submissions: $( grep -c 'in exclusion list' "$file" )"
echo "Files with existing hash skipped: $( grep -c 'downloaded elsewhere' "$file" )"
echo "Submissions from excluded subreddits: $( grep -c 'in skip list' "$file" )"
echo "Downloaded submissions: $( grep -c 'Downloaded submission' "$file" )"
echo "Failed downloads: $( grep -c 'failed to download submission' "$file" )"
echo "Files already downloaded: $( grep -c 'already exists, continuing' "$file" )"
echo "Hard linked submissions: $( grep -c 'Hard link made' "$file" )"
echo "Excluded submissions: $( grep -c 'in exclusion list' "$file" )"
echo "Files with existing hash skipped: $( grep -c 'downloaded elsewhere' "$file" )"
echo "Submissions from excluded subreddits: $( grep -c 'in skip list' "$file" )"

View file

@ -185,7 +185,7 @@ def test_cli_download_user_data_bad_me_unauthenticated(test_args: list[str], tmp
test_args = create_basic_args_for_download_runner(test_args, tmp_path)
result = runner.invoke(cli, test_args)
assert result.exit_code == 0
assert 'To use "me" as a user, an authenticated Reddit instance must be used' in result.output
assert "To use 'me' as a user, an authenticated Reddit instance must be used" in result.output
@pytest.mark.online