1
0
Fork 0
mirror of synced 2024-09-28 15:22:16 +12:00

Quote cleanup

cleanup of some strings to prioritize outer double quotes when both are present or switch to double quotes when only single is present.
This commit is contained in:
OMEGARAZER 2023-02-18 16:06:32 -05:00
parent 5c57de7c7d
commit 98aa3d7cb6
No known key found for this signature in database
GPG key ID: D89925310D306E35
11 changed files with 36 additions and 38 deletions

View file

@ -80,11 +80,11 @@ bdfr download ./path/to/output --user reddituser --submitted -L 100
```
```bash
bdfr download ./path/to/output --user me --saved --authenticate -L 25 --file-scheme '{POSTID}'
bdfr download ./path/to/output --user me --saved --authenticate -L 25 --file-scheme "{POSTID}"
```
```bash
bdfr download ./path/to/output --subreddit 'Python, all, mindustry' -L 10 --make-hard-links
bdfr download ./path/to/output --subreddit "Python, all, mindustry" -L 10 --make-hard-links
```
```bash
@ -92,7 +92,7 @@ bdfr archive ./path/to/output --user reddituser --submitted --all-comments --com
```
```bash
bdfr archive ./path/to/output --subreddit all --format yaml -L 500 --folder-scheme ''
bdfr archive ./path/to/output --subreddit all --format yaml -L 500 --folder-scheme ""
```
Alternatively, you can pass options through a YAML file.
@ -191,13 +191,13 @@ The following options are common between both the `archive` and `download` comma
- This is the name of a multireddit to add as a source
- Can be specified multiple times
- This can be done by using `-m` multiple times
- Multireddits can also be used to provide CSV multireddits e.g. `-m 'chess, favourites'`
- Multireddits can also be used to provide CSV multireddits e.g. `-m "chess, favourites"`
- The specified multireddits must all belong to the user specified with the `--user` option
- `-s, --subreddit`
- This adds a subreddit as a source
- Can be used multiple times
- This can be done by using `-s` multiple times
- Subreddits can also be used to provide CSV subreddits e.g. `-s 'all, python, mindustry'`
- Subreddits can also be used to provide CSV subreddits e.g. `-s "all, python, mindustry"`
- `-t, --time`
- This is the time filter that will be applied to all applicable sources
- This option does not apply to upvoted or saved posts when scraping from these sources

View file

@ -125,7 +125,7 @@ class RedditConnector(metaclass=ABCMeta):
disabled_modules = self.split_args_input(disabled_modules)
disabled_modules = {name.strip().lower() for name in disabled_modules}
self.args.disable_module = disabled_modules
logger.debug(f'Disabling the following modules: {", ".join(self.args.disable_module)}')
logger.debug(f"Disabling the following modules: {', '.join(self.args.disable_module)}")
def create_reddit_instance(self):
if self.args.authenticate:
@ -301,7 +301,7 @@ class RedditConnector(metaclass=ABCMeta):
logger.log(9, f"Resolved user to {resolved_name}")
return resolved_name
else:
logger.warning('To use "me" as a user, an authenticated Reddit instance must be used')
logger.warning("To use 'me' as a user, an authenticated Reddit instance must be used")
else:
return in_name

View file

@ -66,7 +66,7 @@ class RedditDownloader(RedditConnector):
):
logger.debug(
f"Submission {submission.id} in {submission.subreddit.display_name} skipped"
f' due to {submission.author.name if submission.author else "DELETED"} being an ignored user'
f" due to {submission.author.name if submission.author else 'DELETED'} being an ignored user"
)
return
elif self.args.min_score and submission.score < self.args.min_score:

View file

@ -59,10 +59,10 @@ class OAuth2Authenticator:
if state != params["state"]:
self.send_message(client)
raise RedditAuthenticationError(f'State mismatch in OAuth2. Expected: {state} Received: {params["state"]}')
raise RedditAuthenticationError(f"State mismatch in OAuth2. Expected: {state} Received: {params['state']}")
elif "error" in params:
self.send_message(client)
raise RedditAuthenticationError(f'Error in OAuth2: {params["error"]}')
raise RedditAuthenticationError(f"Error in OAuth2: {params['error']}")
self.send_message(client, "<script>alert('You can go back to terminal window now.')</script>")
refresh_token = reddit.auth.authorize(params["code"])

View file

@ -82,7 +82,7 @@ class DownloadFactory:
"php3",
"xhtml",
)
if re.match(rf'(?i).*/.*\.({"|".join(web_extensions)})$', url):
if re.match(rf"(?i).*/.*\.({'|'.join(web_extensions)})$", url):
return True
else:
return False

View file

@ -72,10 +72,8 @@ class Redgifs(BaseDownloader):
else:
out.add(response_json["gif"]["urls"]["sd"])
elif response_json["gif"]["type"] == 2: # type 2 is an image
if response_json["gif"]["gallery"]:
content = Redgifs.retrieve_url(
f'https://api.redgifs.com/v2/gallery/{response_json["gif"]["gallery"]}'
)
if gallery := response_json["gif"]["gallery"]:
content = Redgifs.retrieve_url(f"https://api.redgifs.com/v2/gallery/{gallery}")
response_json = json.loads(content.text)
out = {p["urls"]["hd"] for p in response_json["gifs"]}
else:

View file

@ -36,7 +36,7 @@ dependencies = [
dynamic = ["version"]
[tool.setuptools]
dynamic = {"version" = {attr = 'bdfr.__version__'}}
dynamic = {"version" = {attr = "bdfr.__version__"}}
packages = ["bdfr", "bdfr.archive_entry", "bdfr.site_downloaders", "bdfr.site_downloaders.fallback_downloaders",]
data-files = {"config" = ["bdfr/default_config.cfg",]}

View file

@ -3,14 +3,14 @@
if [ -e "$1" ]; then
file="$1"
else
echo 'CANNOT FIND LOG FILE'
echo "CANNOT FIND LOG FILE"
exit 1
fi
{
grep 'Could not download submission' "$file" | awk '{ print $12 }' | rev | cut -c 2- | rev ;
grep 'Failed to download resource' "$file" | awk '{ print $15 }' ;
grep 'failed to download submission' "$file" | awk '{ print $14 }' | rev | cut -c 2- | rev ;
grep 'Failed to write file' "$file" | awk '{ print $14 }' ;
grep 'skipped due to disabled module' "$file" | awk '{ print $9 }' ;
grep "Could not download submission" "$file" | awk "{ print $12 }" | rev | cut -c 2- | rev ;
grep "Failed to download resource" "$file" | awk "{ print $15 }" ;
grep "failed to download submission" "$file" | awk "{ print $14 }" | rev | cut -c 2- | rev ;
grep "Failed to write file" "$file" | awk "{ print $14 }" ;
grep "skipped due to disabled module" "$file" | awk "{ print $9 }" ;
}

View file

@ -3,15 +3,15 @@
if [ -e "$1" ]; then
file="$1"
else
echo 'CANNOT FIND LOG FILE'
echo "CANNOT FIND LOG FILE"
exit 1
fi
{
grep 'Downloaded submission' "$file" | awk '{ print $(NF-2) }' ;
grep 'Resource hash' "$file" | awk '{ print $(NF-2) }' ;
grep 'Download filter' "$file" | awk '{ print $(NF-3) }' ;
grep 'already exists, continuing' "$file" | awk '{ print $(NF-3) }' ;
grep 'Hard link made' "$file" | awk '{ print $(NF) }' ;
grep 'filtered due to score' "$file" | awk '{ print $9 }'
grep "Downloaded submission" "$file" | awk "{ print $(NF-2) }" ;
grep "Resource hash" "$file" | awk "{ print $(NF-2) }" ;
grep "Download filter" "$file" | awk "{ print $(NF-3) }" ;
grep "already exists, continuing" "$file" | awk "{ print $(NF-3) }" ;
grep "Hard link made" "$file" | awk "{ print $(NF) }" ;
grep "filtered due to score" "$file" | awk "{ print $9 }"
}

View file

@ -3,14 +3,14 @@
if [ -e "$1" ]; then
file="$1"
else
echo 'CANNOT FIND LOG FILE'
echo "CANNOT FIND LOG FILE"
exit 1
fi
echo "Downloaded submissions: $( grep -c 'Downloaded submission' "$file" )"
echo "Failed downloads: $( grep -c 'failed to download submission' "$file" )"
echo "Files already downloaded: $( grep -c 'already exists, continuing' "$file" )"
echo "Hard linked submissions: $( grep -c 'Hard link made' "$file" )"
echo "Excluded submissions: $( grep -c 'in exclusion list' "$file" )"
echo "Files with existing hash skipped: $( grep -c 'downloaded elsewhere' "$file" )"
echo "Submissions from excluded subreddits: $( grep -c 'in skip list' "$file" )"
echo "Downloaded submissions: $( grep -c 'Downloaded submission' '$file' )"
echo "Failed downloads: $( grep -c 'failed to download submission' '$file' )"
echo "Files already downloaded: $( grep -c 'already exists, continuing' '$file' )"
echo "Hard linked submissions: $( grep -c 'Hard link made' '$file' )"
echo "Excluded submissions: $( grep -c 'in exclusion list' '$file' )"
echo "Files with existing hash skipped: $( grep -c 'downloaded elsewhere' '$file' )"
echo "Submissions from excluded subreddits: $( grep -c 'in skip list' '$file' )"

View file

@ -185,7 +185,7 @@ def test_cli_download_user_data_bad_me_unauthenticated(test_args: list[str], tmp
test_args = create_basic_args_for_download_runner(test_args, tmp_path)
result = runner.invoke(cli, test_args)
assert result.exit_code == 0
assert 'To use "me" as a user, an authenticated Reddit instance must be used' in result.output
assert "To use 'me' as a user, an authenticated Reddit instance must be used" in result.output
@pytest.mark.online