2016-12-22 12:06:32 +13:00
|
|
|
import datetime
|
|
|
|
import traceback
|
2017-07-03 15:20:01 +12:00
|
|
|
import asyncio
|
2016-12-22 12:06:32 +13:00
|
|
|
from collections import deque
|
|
|
|
from itertools import islice
|
|
|
|
from random import shuffle
|
|
|
|
|
2017-07-03 15:20:01 +12:00
|
|
|
from .source import YoutubeDLSource
|
2017-03-08 11:35:30 +13:00
|
|
|
from .entry import URLPlaylistEntry, get_header
|
2017-04-23 15:09:03 +12:00
|
|
|
from .exceptions import ExtractionError, WrongEntryTypeError, LiveStreamError
|
2016-12-22 12:06:32 +13:00
|
|
|
from .event_emitter import EventEmitter
|
|
|
|
|
|
|
|
|
|
|
|
class Playlist(EventEmitter):
    """
    A playlist manages the list of songs that will be played.
    """

    def __init__(self, bot):
        """
        :param bot: The bot instance; supplies the event loop and downloader.
        """
        super().__init__()
        self.bot = bot
        self.loop = bot.loop
        self.downloader = bot.downloader
        # deque gives O(1) pops from the left when the player consumes entries
        self.entries = deque()

    def __iter__(self):
        return iter(self.entries)

    def shuffle(self):
        """Shuffle the queued entries in place."""
        shuffle(self.entries)

    def clear(self):
        """Remove every entry from the queue."""
        self.entries.clear()

    @property
    def count(self):
        """The number of entries currently in the queue."""
        # len() is already 0 for an empty deque, so no branch is needed
        return len(self.entries)

    async def add_entry(self, song_url, **meta):
        """
        Adds a song to this playlist.

        :param song_url: The url of the song to add.
        :param meta: Extra metadata for the entry.
            NOTE(review): meta is accepted but not attached to the entry
            here, yet count_for_user() expects entries to carry a `meta`
            dict -- confirm YoutubeDLSource populates it.
        :returns: The prepared entry that was appended to the queue.
        """
        entry = YoutubeDLSource(self, song_url)
        await entry.prepare()
        self.entries.append(entry)
        return entry

    async def import_from(self, playlist_url, requester):
        """
        Imports the songs from `playlist_url` and queues them to be played.

        This is an async generator: it first yields the number of entries
        found, then yields True/False for each entry as it is (or fails to
        be) queued.

        :param playlist_url: The playlist url to extract entries from.
        :param requester: The user who requested the playlist; stored on
            each successfully queued entry.
        :raises ExtractionError: If no information could be extracted.
        :raises WrongEntryTypeError: If the url is not a playlist.
        """
        try:
            info = await self.downloader.safe_extract_info(self.loop, playlist_url, download=False)
        except Exception as e:
            raise ExtractionError('Could not extract information from {}\n\n{}'.format(playlist_url, e))

        if not info:
            raise ExtractionError('Could not extract information from %s' % playlist_url)

        if info.get('playlist') is None and 'playlist' not in info.get('extractor', ''):
            raise WrongEntryTypeError('This is not a playlist!', False, playlist_url)

        # Once again, the generic extractor fucks things up: it does not
        # provide a per-entry 'webpage_url', only 'url'.
        if info.get('extractor', None) == 'generic':
            url_field = 'url'
        else:
            url_field = 'webpage_url'

        yield len(info['entries'])

        for item in info['entries']:
            if not item:
                yield False
                continue

            entry = YoutubeDLSource(self, item[url_field])
            try:
                await entry.prepare()
            except Exception:
                # was a bare `except:`; narrowed so task cancellation
                # (BaseException on modern Python) is not swallowed
                yield False
            else:
                entry.requester = requester
                self.entries.append(entry)
                yield True

    async def async_process_youtube_playlist(self, playlist_url, **meta):
        """
        Processes youtube playlists links from `playlist_url` in a questionable, async fashion.

        :param playlist_url: The playlist url to be cut into individual urls and added to the playlist
        :param meta: Any additional metadata to add to the playlist entry
        :returns: The list of entries that were successfully queued.
        :raises ExtractionError: If no information could be extracted.
        """
        try:
            info = await self.downloader.safe_extract_info(self.loop, playlist_url, download=False, process=False)
        except Exception as e:
            raise ExtractionError('Could not extract information from {}\n\n{}'.format(playlist_url, e))

        if not info:
            raise ExtractionError('Could not extract information from %s' % playlist_url)

        gooditems = []
        baditems = 0
        # loop-invariant: the base url depends only on the playlist page
        baseurl = info['webpage_url'].split('playlist?list=')[0]

        for entry_data in info['entries']:
            if entry_data:
                song_url = baseurl + 'watch?v=%s' % entry_data['id']
                try:
                    # BUG FIX: add_entry returns a single entry; the old code
                    # unpacked it into (entry, elen), which failed for every
                    # item and counted all of them as bad.
                    entry = await self.add_entry(song_url, **meta)
                    gooditems.append(entry)
                except Exception:
                    # covers ExtractionError and anything else; just count it
                    baditems += 1
            else:
                baditems += 1

        return gooditems

    async def async_process_sc_bc_playlist(self, playlist_url, **meta):
        """
        Processes soundcloud set and bancdamp album links from `playlist_url` in a questionable, async fashion.

        :param playlist_url: The playlist url to be cut into individual urls and added to the playlist
        :param meta: Any additional metadata to add to the playlist entry
        :returns: The list of entries that were successfully queued.
        :raises ExtractionError: If no information could be extracted.
        """
        try:
            info = await self.downloader.safe_extract_info(self.loop, playlist_url, download=False, process=False)
        except Exception as e:
            raise ExtractionError('Could not extract information from {}\n\n{}'.format(playlist_url, e))

        if not info:
            raise ExtractionError('Could not extract information from %s' % playlist_url)

        gooditems = []
        baditems = 0
        for entry_data in info['entries']:
            if entry_data:
                song_url = entry_data['url']
                try:
                    # BUG FIX: same unpacking bug as the youtube variant --
                    # add_entry returns one value, not a pair.
                    entry = await self.add_entry(song_url, **meta)
                    gooditems.append(entry)
                except Exception:
                    baditems += 1
            else:
                baditems += 1

        return gooditems

    def _add_entry(self, entry):
        # Internal helper: append an already-prepared entry without
        # re-extracting/re-preparing it.
        self.entries.append(entry)

    async def next_entry(self):
        """Get the next song in the playlist; this coroutine will wait until the next song is ready."""
        entry = self.peek()

        # While we have an entry available
        while entry:
            # Check if we are ready or if we've errored, either way we'll pop it from the deque
            if entry.ready or entry.error:
                return self.entries.popleft()

            # Otherwise, wait a second and check again
            await asyncio.sleep(1)
            # "Refresh" the next entry, in case someone cleared the next song in the queue
            entry = self.peek()

        # If we've reached here, we have no entries
        return None

    def peek(self):
        """
        Returns the next entry that should be scheduled to be played,
        or None if the queue is empty.
        """
        if self.entries:
            return self.entries[0]
        return None

    def count_for_user(self, user):
        """Return how many queued entries were requested by `user`."""
        return sum(1 for e in self.entries if e.meta.get('author', None) == user)