[UrlFetch] Catch timeouts and allow base urls
@@ -1,3 +1,4 @@
+import asyncio
 import aiohttp
 import logging
 from urllib.parse import urlparse
@@ -9,7 +10,7 @@ from redbot.core.utils.menus import menu, DEFAULT_CONTROLS
 log = logging.getLogger("red.aikaterna.urlfetch")
 
 
-__version__ = "1.0.1"
+__version__ = "1.1.0"
 
 
 class UrlFetch(commands.Cog):
@@ -18,34 +19,41 @@ class UrlFetch(commands.Cog):
     def __init__(self, bot):
         self.bot = bot
+
+        self._headers = {'User-Agent': 'Python/3.8'}
 
     @commands.command()
     async def urlfetch(self, ctx, url: str):
         """
         Input a URL to read.
         """
-        valid_url = await self._valid_url(ctx, url)
-        if valid_url:
-            text = await self._get_url_content(url)
-            if text:
-                page_list = []
-                for page in pagify(text, delims=["\n"], page_length=1800):
-                    page_list.append(box(page))
-                if len(page_list) == 1:
-                    await ctx.send(box(page))
-                else:
-                    await menu(ctx, page_list, DEFAULT_CONTROLS)
-        else:
-            return
+        async with ctx.typing():
+            valid_url = await self._valid_url(ctx, url)
+            if valid_url:
+                text = await self._get_url_content(url)
+                if text:
+                    page_list = []
+                    for page in pagify(text, delims=["\n"], page_length=1800):
+                        page_list.append(box(page))
+                    if len(page_list) == 1:
+                        await ctx.send(box(page))
+                    else:
+                        await menu(ctx, page_list, DEFAULT_CONTROLS)
+            else:
+                return
 
     async def _get_url_content(self, url: str):
         try:
-            async with aiohttp.ClientSession() as session:
+            timeout = aiohttp.ClientTimeout(total=20)
+            async with aiohttp.ClientSession(headers=self._headers, timeout=timeout) as session:
                 async with session.get(url) as resp:
                     text = await resp.text()
                     return text
         except aiohttp.client_exceptions.ClientConnectorError:
             log.error(f"aiohttp failure accessing site at url:\n\t{url}", exc_info=True)
             return None
+        except asyncio.exceptions.TimeoutError:
+            log.error(f"asyncio timeout while accessing feed at url:\n\t{url}")
+            return None
         except Exception:
             log.error(f"General failure accessing site at url:\n\t{url}", exc_info=True)
             return None
@@ -58,7 +66,7 @@ class UrlFetch(commands.Cog):
             await ctx.send("There was an issue trying to fetch that site. Please check your console for the error.")
             return None
 
-        if all([result.scheme, result.netloc, result.path]):
+        if all([result.scheme, result.netloc]):
             text = await self._get_url_content(url)
             if not text:
                 await ctx.send("No text present at the given url.")
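
Note on "allow base urls": urlparse() returns an empty path for a bare domain, so the old all([result.scheme, result.netloc, result.path]) check rejected a base url like https://example.com. Dropping result.path from the check fixes that while still rejecting strings with no scheme or host. A quick illustration:

    from urllib.parse import urlparse

    result = urlparse("https://example.com")
    print(repr(result.path))                                 # '' -- empty and falsy
    print(all([result.scheme, result.netloc, result.path]))  # False: old check rejects a base url
    print(all([result.scheme, result.netloc]))               # True: new check accepts it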