2019-10-26 11:46:37 -07:00
|
|
|
import aiohttp
|
2021-09-14 14:37:40 -04:00
|
|
|
import discord
|
|
|
|
|
import contextlib
|
2019-10-26 11:46:37 -07:00
|
|
|
from bs4 import BeautifulSoup
|
2021-04-07 16:02:47 -07:00
|
|
|
import json
|
2020-04-11 19:25:01 -04:00
|
|
|
import logging
|
2019-10-26 11:46:37 -07:00
|
|
|
import re
|
2019-07-23 17:06:45 -07:00
|
|
|
from redbot.core import commands
|
2020-09-09 10:17:01 -07:00
|
|
|
from redbot.core.utils.chat_formatting import pagify
|
2019-07-23 17:06:45 -07:00
|
|
|
|
|
|
|
|
|
2020-04-11 19:25:01 -04:00
|
|
|
# Module-level logger; namespaced under "red." so Red's logging config picks it up.
log = logging.getLogger("red.aikaterna.dictionary")
|
|
|
|
|
|
|
|
|
|
|
2019-07-23 17:06:45 -07:00
|
|
|
class Dictionary(commands.Cog):
    """
    Word, yo

    Parts of this cog are adapted from the PyDictionary library.
    """

    async def red_delete_data_for_user(self, **kwargs):
        # Red data-deletion API hook: this cog stores no end-user data,
        # so there is nothing to delete.
        """Nothing to delete"""
        return
|
|
|
|
|
|
2019-07-23 17:06:45 -07:00
|
|
|
    def __init__(self, bot):
        # Bot reference plus one shared HTTP session reused by every lookup;
        # the session is closed in cog_unload.
        self.bot = bot
        self.session = aiohttp.ClientSession()
|
|
|
|
|
|
|
|
|
|
    def cog_unload(self):
        # cog_unload is synchronous, but ClientSession.close() is a coroutine,
        # so schedule it on the bot's event loop instead of awaiting it here.
        self.bot.loop.create_task(self.session.close())
|
|
|
|
|
|
2019-07-23 17:06:45 -07:00
|
|
|
@commands.command()
|
|
|
|
|
async def define(self, ctx, *, word: str):
|
|
|
|
|
"""Displays definitions of a given word."""
|
|
|
|
|
search_msg = await ctx.send("Searching...")
|
|
|
|
|
search_term = word.split(" ", 1)[0]
|
2019-10-26 11:46:37 -07:00
|
|
|
result = await self._definition(ctx, search_term)
|
2019-07-23 17:06:45 -07:00
|
|
|
str_buffer = ""
|
2019-10-26 11:46:37 -07:00
|
|
|
if not result:
|
2021-09-14 14:37:40 -04:00
|
|
|
with contextlib.suppress(discord.NotFound):
|
|
|
|
|
await search_msg.delete()
|
2020-09-09 10:17:01 -07:00
|
|
|
await ctx.send("This word is not in the dictionary.")
|
|
|
|
|
return
|
2019-07-23 17:06:45 -07:00
|
|
|
for key in result:
|
2019-10-26 11:46:37 -07:00
|
|
|
str_buffer += f"\n**{key}**: \n"
|
2019-07-23 17:06:45 -07:00
|
|
|
counter = 1
|
|
|
|
|
j = False
|
|
|
|
|
for val in result[key]:
|
|
|
|
|
if val.startswith("("):
|
2019-10-26 11:46:37 -07:00
|
|
|
str_buffer += f"{str(counter)}. *{val})* "
|
2019-07-23 17:06:45 -07:00
|
|
|
counter += 1
|
|
|
|
|
j = True
|
|
|
|
|
else:
|
|
|
|
|
if j:
|
2019-10-26 11:46:37 -07:00
|
|
|
str_buffer += f"{val}\n"
|
2019-07-23 17:06:45 -07:00
|
|
|
j = False
|
|
|
|
|
else:
|
2019-10-26 11:46:37 -07:00
|
|
|
str_buffer += f"{str(counter)}. {val}\n"
|
2019-07-23 17:06:45 -07:00
|
|
|
counter += 1
|
2021-09-14 14:37:40 -04:00
|
|
|
with contextlib.suppress(discord.NotFound):
|
|
|
|
|
await search_msg.delete()
|
2020-09-09 10:17:01 -07:00
|
|
|
for page in pagify(str_buffer, delims=["\n"]):
|
|
|
|
|
await ctx.send(page)
|
2019-10-26 11:46:37 -07:00
|
|
|
|
|
|
|
|
async def _definition(self, ctx, word):
|
2020-04-11 19:25:01 -04:00
|
|
|
data = await self._get_soup_object(f"http://wordnetweb.princeton.edu/perl/webwn?s={word}")
|
|
|
|
|
if not data:
|
|
|
|
|
return await ctx.send("Error fetching data.")
|
|
|
|
|
types = data.findAll("h3")
|
2019-10-26 11:46:37 -07:00
|
|
|
length = len(types)
|
2020-04-11 19:25:01 -04:00
|
|
|
lists = data.findAll("ul")
|
2019-10-26 11:46:37 -07:00
|
|
|
out = {}
|
|
|
|
|
if not lists:
|
|
|
|
|
return
|
|
|
|
|
for a in types:
|
|
|
|
|
reg = str(lists[types.index(a)])
|
|
|
|
|
meanings = []
|
|
|
|
|
for x in re.findall(r">\s\((.*?)\)\s<", reg):
|
|
|
|
|
if "often followed by" in x:
|
|
|
|
|
pass
|
|
|
|
|
elif len(x) > 5 or " " in str(x):
|
|
|
|
|
meanings.append(x)
|
|
|
|
|
name = a.text
|
|
|
|
|
out[name] = meanings
|
|
|
|
|
return out
|
|
|
|
|
|
2021-04-07 16:02:47 -07:00
|
|
|
@commands.command()
|
|
|
|
|
async def antonym(self, ctx, *, word: str):
|
|
|
|
|
"""Displays antonyms for a given word."""
|
|
|
|
|
search_term = word.split(" ", 1)[0]
|
|
|
|
|
result = await self._antonym_or_synonym(ctx, "antonyms", search_term)
|
|
|
|
|
if not result:
|
|
|
|
|
await ctx.send("This word is not in the dictionary or nothing was found.")
|
|
|
|
|
return
|
|
|
|
|
|
|
|
|
|
result_text = "*, *".join(result)
|
|
|
|
|
msg = f"Antonyms for **{search_term}**: *{result_text}*"
|
|
|
|
|
for page in pagify(msg, delims=["\n"]):
|
|
|
|
|
await ctx.send(page)
|
2019-10-26 11:46:37 -07:00
|
|
|
|
|
|
|
|
@commands.command()
|
|
|
|
|
async def synonym(self, ctx, *, word: str):
|
|
|
|
|
"""Displays synonyms for a given word."""
|
|
|
|
|
search_term = word.split(" ", 1)[0]
|
2021-04-07 16:02:47 -07:00
|
|
|
result = await self._antonym_or_synonym(ctx, "synonyms", search_term)
|
2019-10-26 11:46:37 -07:00
|
|
|
if not result:
|
2021-04-07 16:02:47 -07:00
|
|
|
await ctx.send("This word is not in the dictionary or nothing was found.")
|
2020-09-09 10:17:01 -07:00
|
|
|
return
|
2019-10-26 11:46:37 -07:00
|
|
|
|
|
|
|
|
result_text = "*, *".join(result)
|
2020-09-09 10:17:01 -07:00
|
|
|
msg = f"Synonyms for **{search_term}**: *{result_text}*"
|
|
|
|
|
for page in pagify(msg, delims=["\n"]):
|
|
|
|
|
await ctx.send(page)
|
2021-04-07 16:02:47 -07:00
|
|
|
|
|
|
|
|
async def _antonym_or_synonym(self, ctx, lookup_type, word):
|
|
|
|
|
if lookup_type not in ["antonyms", "synonyms"]:
|
|
|
|
|
return None
|
|
|
|
|
data = await self._get_soup_object(f"http://www.thesaurus.com/browse/{word}")
|
|
|
|
|
if not data:
|
|
|
|
|
await ctx.send("Error getting information from the website.")
|
|
|
|
|
return
|
|
|
|
|
|
|
|
|
|
website_data = None
|
|
|
|
|
script = data.find_all("script")
|
|
|
|
|
for item in script:
|
|
|
|
|
if item.string:
|
|
|
|
|
if "window.INITIAL_STATE" in item.string:
|
|
|
|
|
content = item.string
|
|
|
|
|
content = content.lstrip("window.INITIAL_STATE =").rstrip(";")
|
2021-10-06 12:53:21 -07:00
|
|
|
content = content.replace("undefined", '"None"').replace(": true", ': "True"').replace(": false", ': "False"')
|
2021-04-07 16:02:47 -07:00
|
|
|
try:
|
|
|
|
|
website_data = json.loads(content)
|
|
|
|
|
except json.decoder.JSONDecodeError:
|
|
|
|
|
return None
|
|
|
|
|
except Exception as e:
|
|
|
|
|
log.exception(e, exc_info=e)
|
|
|
|
|
await ctx.send("Something broke. Check your console for more information.")
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
final = []
|
|
|
|
|
if website_data:
|
2021-10-06 10:49:18 -07:00
|
|
|
tuna_api_data = website_data["searchData"]["tunaApiData"]
|
|
|
|
|
if not tuna_api_data:
|
|
|
|
|
return None
|
|
|
|
|
syn_list = tuna_api_data["posTabs"][0][lookup_type]
|
2021-04-07 16:02:47 -07:00
|
|
|
for syn in syn_list:
|
|
|
|
|
final.append(syn["term"])
|
|
|
|
|
|
|
|
|
|
return final
|
|
|
|
|
|
|
|
|
|
async def _get_soup_object(self, url):
|
|
|
|
|
try:
|
|
|
|
|
async with self.session.request("GET", url) as response:
|
|
|
|
|
return BeautifulSoup(await response.text(), "html.parser")
|
|
|
|
|
except Exception:
|
|
|
|
|
log.error("Error fetching dictionary.py related webpage", exc_info=True)
|
|
|
|
|
return None
|