2016-07-23 23:59:02 +12:00
|
|
|
from discord.ext import commands
|
2017-01-21 12:32:45 +13:00
|
|
|
|
|
|
|
from . import utils
|
|
|
|
|
2016-10-24 15:43:59 +13:00
|
|
|
from bs4 import BeautifulSoup as bs
|
2016-08-18 09:28:45 +12:00
|
|
|
|
|
|
|
import discord
|
2016-07-24 03:33:22 +12:00
|
|
|
import random
|
2016-08-16 08:43:18 +12:00
|
|
|
import re
|
2016-09-29 20:40:29 +13:00
|
|
|
import math
|
2016-11-02 08:59:45 +13:00
|
|
|
|
2016-08-18 09:28:45 +12:00
|
|
|
|
2016-07-23 23:59:02 +12:00
|
|
|
class Links:
    """This class contains all the commands that make HTTP requests

    In other words, all commands here rely on other URL's to complete their requests"""

    def __init__(self, bot):
        # The bot instance this cog is attached to
        self.bot = bot

    @commands.command(aliases=['g'])
    @utils.custom_perms(send_messages=True)
    async def google(self, ctx, *, query: str):
        """Searches google for a provided query

        EXAMPLE: !g Random cat pictures!
        RESULT: Links to sites with random cat pictures!"""
        await ctx.message.channel.trigger_typing()

        url = "https://www.google.com/search"

        # Turn safe filter on or off, based on whether or not this is a nsfw channel
        nsfw = await utils.channel_is_nsfw(ctx.message.channel)
        safe = 'off' if nsfw else 'on'

        params = {'q': query,
                  'safe': safe,
                  'hl': 'en',
                  'cr': 'countryUS'}

        # Our format we'll end up using to send to the channel
        fmt = ""

        # First make the request to google to get the results
        data = await utils.request(url, payload=params, attr='text')

        if data is None:
            await ctx.send("I failed to connect to google! (That can happen??)")
            return

        # Convert to a BeautifulSoup element and loop through each result classified by h3 tags with a class of 'r'
        soup = bs(data, 'html.parser')

        for element in soup.find_all('h3', class_='r')[:3]:
            # Get the link's href tag, which looks like q=[url here]&sa
            # Use a lookahead and lookbehind to find this url exactly
            try:
                result_url = re.search('(?<=q=).*(?=&sa=)', element.find('a').get('href')).group(0)
            except AttributeError:
                await ctx.send("I couldn't find any results for {}!".format(query))
                return

            # Get the next sibling, find the span where the description is, and get the text from this
            # next_sibling may be None or a NavigableString (AttributeError/TypeError), or the
            # span may be missing (find returns None -> AttributeError); fall back to no description
            try:
                description = element.next_sibling.find('span', class_='st').text
            except (AttributeError, TypeError):
                description = ""

            # Add this to our text we'll use to send
            fmt += '\n\n**URL**: <{}>\n**Description**: {}'.format(result_url, description)

        fmt = "**Top 3 results for the query** _{}_:{}".format(query, fmt)
        await ctx.send(fmt)

    @commands.command(aliases=['yt'])
    @utils.custom_perms(send_messages=True)
    async def youtube(self, ctx, *, query: str):
        """Searches youtube for a provided query

        EXAMPLE: !youtube Cat videos!
        RESULT: Cat videos!"""
        await ctx.message.channel.trigger_typing()

        key = utils.youtube_key
        url = "https://www.googleapis.com/youtube/v3/search"
        params = {'key': key,
                  'part': 'snippet, id',
                  'type': 'video',
                  'q': query}

        data = await utils.request(url, payload=params)

        if data is None:
            await ctx.send("Sorry but I failed to connect to youtube!")
            return

        # An empty 'items' list means youtube found nothing for this query
        try:
            result = data['items'][0]
        except IndexError:
            await ctx.send("I could not find any results with the search term {}".format(query))
            return

        result_url = "https://youtube.com/watch?v={}".format(result['id']['videoId'])
        title = result['snippet']['title']
        description = result['snippet']['description']

        fmt = "**Title:** {}\n\n**Description:** {}\n\n**URL:** <{}>".format(title, description, result_url)
        await ctx.send(fmt)

    @commands.command()
    @utils.custom_perms(send_messages=True)
    async def wiki(self, ctx, *, query: str):
        """Pulls the top match for a specific term from wikipedia, and returns the result

        EXAMPLE: !wiki Test
        RESULT: A link to the wikipedia article for the word test"""
        await ctx.message.channel.trigger_typing()

        # All we need to do is search for the term provided, so the action, list, and format never need to change
        base_url = "https://en.wikipedia.org/w/api.php"
        params = {"action": "query",
                  "list": "search",
                  "format": "json",
                  "srsearch": query}

        data = await utils.request(base_url, payload=params)

        if data is None:
            await ctx.send("Sorry but I failed to connect to Wikipedia!")
            return

        if len(data['query']['search']) == 0:
            await ctx.send("I could not find any results with that term, I tried my best :c")
            return

        # Wiki articles' URLs are in the format https://en.wikipedia.org/wiki/[Titlehere]
        # Replace spaces with %20
        url = "https://en.wikipedia.org/wiki/{}".format(data['query']['search'][0]['title'].replace(' ', '%20'))
        snippet = data['query']['search'][0]['snippet']
        # The next part replaces some of the HTML formatting that's provided
        # These are the only ones I've encountered so far through testing, there may be more though
        snippet = re.sub('<span class=\\"searchmatch\\">', '', snippet)
        snippet = re.sub('</span>', '', snippet)
        # BUGFIX: this previously substituted '"' for '"' (a no-op); the API escapes
        # quotes as the HTML entity &quot;, which is what we need to unescape here
        snippet = re.sub('&quot;', '"', snippet)

        await ctx.send(
            "Here is the best match I found with the query `{}`:\nURL: <{}>\nSnippet: \n```\n{}```".format(query, url,
                                                                                                           snippet))

    @commands.command()
    @utils.custom_perms(send_messages=True)
    async def urban(self, ctx, *, msg: str):
        """Pulls the top urbandictionary.com definition for a term

        EXAMPLE: !urban a normal phrase
        RESULT: Probably something lewd; this is urban dictionary we're talking about"""
        await ctx.message.channel.trigger_typing()

        url = "http://api.urbandictionary.com/v0/define"
        params = {"term": msg}
        try:
            data = await utils.request(url, payload=params)
            if data is None:
                await ctx.send("Sorry but I failed to connect to urban dictionary!")
                return

            # List is the list of definitions found, if it's empty then nothing was found
            if len(data['list']) == 0:
                await ctx.send("No result with that term!")
            # If the list is not empty, use the first result and print its definition
            else:
                await ctx.send(data['list'][0]['definition'])
        # Urban dictionary has some long definitions, some might not be able to be sent
        except discord.HTTPException:
            await ctx.send('```\nError: Definition is too long for me to send```')
        # A malformed/unexpected response body won't have the keys we index into
        except KeyError:
            await ctx.send("Sorry but I failed to connect to urban dictionary!")
|
2016-07-23 23:59:02 +12:00
|
|
|
|
2016-08-18 09:28:45 +12:00
|
|
|
|
2016-07-24 00:01:16 +12:00
|
|
|
def setup(bot):
    """Extension entry point used by discord.py; registers the Links cog on *bot*."""
    cog = Links(bot)
    bot.add_cog(cog)