bulk-downloader-for-reddit/bulkredditdownloader/site_downloaders/imgur.py

#!/usr/bin/env python3

import json
import logging
from typing import Optional

import requests
from praw.models import Submission

from bulkredditdownloader.authenticator import Authenticator
from bulkredditdownloader.errors import NotADownloadableLinkError, ResourceNotFound, SiteDownloaderError
from bulkredditdownloader.resource import Resource
from bulkredditdownloader.site_downloaders.base_downloader import BaseDownloader
from bulkredditdownloader.site_downloaders.direct import Direct

logger = logging.getLogger(__name__)


class Imgur(BaseDownloader):
    imgur_image_domain = "https://i.imgur.com/"

    def __init__(self, post: Submission):
        super().__init__(post)
        self.raw_data = {}

    def find_resources(self, authenticator: Optional[Authenticator] = None) -> list[Resource]:
        link = self.post.url
        if link.endswith(".gifv"):
            # .gifv pages resolve to a direct video file, so defer to the Direct downloader
            direct_thing = Direct(self.post)
            return direct_thing.find_resources(authenticator)

        self.raw_data = self._get_data(link)

        if self._is_album():
            if self.raw_data["album_images"]["count"] != 1:
                out = self._download_album(self.raw_data["album_images"])
            else:
                # A single-image album is treated as a lone image
                out = self._download_image(self.raw_data["album_images"]["images"][0])
        else:
            out = self._download_image(self.raw_data)
        return out

    def _download_album(self, images: dict):
        images_length = images["count"]
        out = []
        for i in range(images_length):
            extension = self._validate_extension(images["images"][i]["ext"])
            image_url = self.imgur_image_domain + images["images"][i]["hash"] + extension
            out.append(Resource(self.post, image_url))
        return out

    def _download_image(self, image: dict):
        extension = self._validate_extension(image["ext"])
        image_url = self.imgur_image_domain + image["hash"] + extension
        return [Resource(self.post, image_url)]

    def _is_album(self) -> bool:
        return "album_images" in self.raw_data

    @staticmethod
    def _get_data(link: str) -> dict:
        cookies = {"over18": "1", "postpagebeta": "0"}
        res = requests.get(link, cookies=cookies)
        if res.status_code != 200:
            raise ResourceNotFound(f"Server responded with {res.status_code} to {link}")
        page_source = res.text

        # Imgur embeds the post metadata as an inline JavaScript object between
        # these two markers; slice out that object and parse it as JSON
        starting_string = "image : "
        ending_string = "group :"
        starting_string_length = len(starting_string)
        try:
            start_index = page_source.index(starting_string) + starting_string_length
            end_index = page_source.index(ending_string, start_index)
        except ValueError:
            raise NotADownloadableLinkError(f"Could not read the page source on {link}")

        # Walk back to the closing brace of the object, then trim any trailing comma or whitespace
        while page_source[end_index] != "}":
            end_index -= 1
        data = page_source[start_index:end_index + 1].strip()
        return json.loads(data)
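
    # NOTE (illustrative, not from the original source): judging by how the rest of
    # this class indexes the result, _get_data is expected to return a dict shaped
    # roughly like one of the following:
    #
    #   single image:  {"hash": "abc1234", "ext": ".jpg", ...}
    #   album:         {"album_images": {"count": 2,
    #                                    "images": [{"hash": "...", "ext": ".png"}, ...]}}
    #
    # The exact keys come from Imgur's inline page JavaScript and may change without notice.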

    @staticmethod
    def _validate_extension(extension_suffix: str) -> str:
        possible_extensions = [".jpg", ".png", ".mp4", ".gif"]
        for extension in possible_extensions:
            if extension in extension_suffix:
                return extension
        else:
            raise SiteDownloaderError(f'"{extension_suffix}" is not recognized as a valid extension for Imgur')
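

# Usage sketch (added for illustration; not part of the original module). A minimal,
# hypothetical way to exercise this downloader outside the normal BDFR pipeline: in the
# real tool `post` is a praw.models.Submission, so the _FakePost stand-in below is an
# assumption that only mimics the single attribute this class reads. Running it verbatim
# performs a live request against the placeholder imgur link.
if __name__ == "__main__":
    class _FakePost:
        url = "https://imgur.com/a/abcdefg"  # placeholder link, not a real album

    resources = Imgur(_FakePost()).find_resources(authenticator=None)
    for resource in resources:
        # Resource is assumed here to expose its source URL as `.url`
        print(resource.url)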