Merge pull request #573 from aliparlakci/development

commit e4fcacfd4f
Serene, 2021-12-20 20:58:38 +10:00 (committed by GitHub)
6 changed files with 47 additions and 3 deletions

@@ -33,6 +33,8 @@ class DownloadFactory:
            return Erome
        elif re.match(r'reddit\.com/gallery/.*', sanitised_url):
            return Gallery
        elif re.match(r'patreon\.com.*', sanitised_url):
            return Gallery
        elif re.match(r'gfycat\.', sanitised_url):
            return Gfycat
        elif re.match(r'(m\.)?imgur.*', sanitised_url):
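
Note on the hunk above: DownloadFactory tries these patterns in order against the sanitised URL and the first match selects the downloader class, so Patreon post links now resolve to the Gallery downloader. A minimal sketch of that first-match dispatch (illustrative only; the pattern table here is a made-up subset, not the project's code):

    import re

    # Illustrative subset of the URL patterns; the first match wins.
    PATTERNS = (
        (r'reddit\.com/gallery/.*', 'Gallery'),
        (r'patreon\.com.*', 'Gallery'),  # new in this commit
        (r'gfycat\.', 'Gfycat'),
    )

    def pull_lever_sketch(sanitised_url: str) -> str:
        for pattern, downloader in PATTERNS:
            if re.match(pattern, sanitised_url):
                return downloader
        raise ValueError(f'no downloader matches {sanitised_url!r}')

    print(pull_lever_sketch('patreon.com/posts/minecart-track-59346560'))  # -> Gallery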

@@ -24,7 +24,7 @@ class Gallery(BaseDownloader):
        except (AttributeError, TypeError):
            try:
                image_urls = self._get_links(self.post.crosspost_parent_list[0]['gallery_data']['items'])
            except (AttributeError, IndexError, TypeError):
            except (AttributeError, IndexError, TypeError, KeyError):
                logger.error(f'Could not find gallery data in submission {self.post.id}')
                logger.exception('Gallery image find failure')
                raise SiteDownloaderError('No images found in Reddit gallery')
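
Note on the hunk above: a crosspost parent that carries no gallery metadata fails the nested lookup with KeyError rather than AttributeError, IndexError, or TypeError, so before this change the exception escaped the handler instead of being logged and re-raised as SiteDownloaderError. A hypothetical illustration (made-up data, not the project's code):

    parent = {'id': 'abc123'}            # crosspost parent without a 'gallery_data' key
    try:
        parent['gallery_data']['items']
    except KeyError:
        print('no gallery data')         # the downloader now reports this case as SiteDownloaderError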

@@ -4,7 +4,7 @@ description_file = README.md
description_content_type = text/markdown
home_page = https://github.com/aliparlakci/bulk-downloader-for-reddit
keywords = reddit, download, archive
version = 2.5.1
version = 2.5.2
author = Ali Parlakci
author_email = parlakciali@gmail.com
maintainer = Serene Arc
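
Note on the hunk above: the only change here is the package metadata version bump from 2.5.1 to 2.5.2. Assuming the distribution is installed under the name bdfr (the name is not shown in this hunk), the installed version can be checked like this:

    from importlib.metadata import version

    print(version('bdfr'))  # expected to print 2.5.2 once this release is installed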

@@ -47,6 +47,7 @@ from bdfr.site_downloaders.youtube import Youtube
    ('https://vimeo.com/channels/31259/53576664', YtdlpFallback),
    ('http://video.pbs.org/viralplayer/2365173446/', YtdlpFallback),
    ('https://www.pornhub.com/view_video.php?viewkey=ph5a2ee0461a8d0', PornHub),
    ('https://www.patreon.com/posts/minecart-track-59346560', Gallery),
))
def test_factory_lever_good(test_submission_url: str, expected_class: BaseDownloader, reddit_instance: praw.Reddit):
    result = DownloadFactory.pull_lever(test_submission_url)
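
Note on the hunk above: the new parametrize case asserts that pull_lever resolves a Patreon post URL to the Gallery downloader class. A standalone sketch of the same check (the module paths are assumed from the bdfr.site_downloaders layout visible in the hunk context, not confirmed by this diff):

    from bdfr.site_downloaders.download_factory import DownloadFactory
    from bdfr.site_downloaders.gallery import Gallery

    # Mirrors the new parametrize case: the factory should pick Gallery for a Patreon post URL.
    assert DownloadFactory.pull_lever('https://www.patreon.com/posts/minecart-track-59346560') is Gallery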

@@ -57,6 +57,14 @@ def test_gallery_get_links(test_ids: list[dict], expected: set[str]):
        '65163f685fb28c5b776e0e77122718be',
        '2a337eb5b13c34d3ca3f51b5db7c13e9',
    }),
    ('rb3ub6', {  # patreon post
        '748a976c6cedf7ea85b6f90e7cb685c7',
        '839796d7745e88ced6355504e1f74508',
        'bcdb740367d0f19f97a77e614b48a42d',
        '0f230b8c4e5d103d35a773fab9814ec3',
        'e5192d6cb4f84c4f4a658355310bf0f9',
        '91cbe172cd8ccbcf049fcea4204eb979',
    })
))
def test_gallery_download(test_submission_id: str, expected_hashes: set[str], reddit_instance: praw.Reddit):
    test_submission = reddit_instance.submission(id=test_submission_id)
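
Note on the hunk above: the expected values are 32-character hex digests, i.e. MD5 hashes of the images downloaded for submission rb3ub6, the Patreon-sourced gallery post added by this commit. A minimal sketch of how such a hash set is formed (hypothetical byte strings stand in for the real downloads):

    import hashlib

    downloaded_contents = [b'first image bytes', b'second image bytes']  # stand-ins for real files
    actual_hashes = {hashlib.md5(content).hexdigest() for content in downloaded_contents}
    print(actual_hashes)  # the test compares the real equivalent of this set with the expected hashes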

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# coding=utf-8
from datetime import datetime, timedelta
from pathlib import Path
from typing import Iterator
from unittest.mock import MagicMock
@@ -195,6 +195,39 @@ def test_get_subreddit_normal(
    assert not any([isinstance(m, MagicMock) for m in results])


@pytest.mark.online
@pytest.mark.reddit
@pytest.mark.parametrize(('test_time', 'test_delta'), (
    ('hour', timedelta(hours=1)),
    ('day', timedelta(days=1)),
    ('week', timedelta(days=7)),
    ('month', timedelta(days=31)),
    ('year', timedelta(days=365)),
))
def test_get_subreddit_time_verification(
        test_time: str,
        test_delta: timedelta,
        downloader_mock: MagicMock,
        reddit_instance: praw.Reddit,
):
    downloader_mock.args.limit = 10
    downloader_mock.args.sort = 'top'
    downloader_mock.args.time = test_time
    downloader_mock.time_filter = RedditConnector.create_time_filter(downloader_mock)
    downloader_mock.sort_filter = RedditConnector.create_sort_filter(downloader_mock)
    downloader_mock.determine_sort_function.return_value = RedditConnector.determine_sort_function(downloader_mock)
    downloader_mock.args.subreddit = ['all']
    downloader_mock.reddit_instance = reddit_instance
    results = RedditConnector.get_subreddits(downloader_mock)
    results = [sub for res1 in results for sub in res1]
    assert all([isinstance(res1, praw.models.Submission) for res1 in results])
    nowtime = datetime.now()
    for r in results:
        result_time = datetime.fromtimestamp(r.created_utc)
        time_diff = nowtime - result_time
        assert time_diff < test_delta


@pytest.mark.online
@pytest.mark.reddit
@pytest.mark.parametrize(('test_subreddits', 'search_term', 'limit', 'time_filter', 'max_expected_len'), (
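
Note on the hunk above: the new test_get_subreddit_time_verification test fetches top submissions from subreddit 'all' with each supported time filter and asserts that every returned submission is younger than the corresponding timedelta. The core age check reduces to the following sketch (the timestamp is made up; praw exposes the real one as submission.created_utc):

    from datetime import datetime, timedelta

    created_utc = (datetime.now() - timedelta(days=2)).timestamp()  # e.g. a submission from two days ago
    age = datetime.now() - datetime.fromtimestamp(created_utc)
    assert age < timedelta(days=7)  # inside the 'week' window; would fail for 'day' or 'hour'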