| field | value |
|---|---|
| author | 2020-12-11 11:05:42 +0100 |
| committer | 2020-12-15 05:02:08 +0100 |
| commit | c42bf69a8b170772710c2184a3d0d3d57f597c30 (patch) |
| tree | 661fb24eaff2fde8aa5d6b607a1004617a3fecc7 |
| parent | Merge remote-tracking branch 'upstream/master' into doc-imp (diff) |
Use global bot http_session instead of parameter
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | bot/converters.py | 2 |
| -rw-r--r-- | bot/exts/info/doc/_cog.py | 12 |
| -rw-r--r-- | bot/exts/info/doc/_inventory_parser.py | 13 |
3 files changed, 13 insertions, 14 deletions
diff --git a/bot/converters.py b/bot/converters.py
index d44b675a7..d558fa3df 100644
--- a/bot/converters.py
+++ b/bot/converters.py
@@ -190,7 +190,7 @@ class InventoryURL(Converter):
     async def convert(ctx: Context, url: str) -> str:
         """Convert url to Intersphinx inventory URL."""
         await ctx.trigger_typing()
-        if await _inventory_parser.fetch_inventory(ctx.bot.http_session, url) is None:
+        if await _inventory_parser.fetch_inventory(url) is None:
             raise BadArgument(f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS}.")
         return url

diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py
index 524dcc829..e1be956cd 100644
--- a/bot/exts/info/doc/_cog.py
+++ b/bot/exts/info/doc/_cog.py
@@ -9,10 +9,10 @@ from contextlib import suppress
 from typing import Dict, List, NamedTuple, Optional, Union

 import discord
-from aiohttp import ClientSession
 from bs4 import BeautifulSoup
 from discord.ext import commands

+from bot import instance as bot_instance
 from bot.bot import Bot
 from bot.constants import MODERATION_ROLES, RedirectOutput
 from bot.converters import InventoryURL, PackageName, ValidURL
@@ -85,7 +85,7 @@ class CachedParser:
         self._item_events: Dict[DocItem, asyncio.Event] = {}
         self._parse_task = None

-    async def get_markdown(self, client_session: ClientSession, doc_item: DocItem) -> str:
+    async def get_markdown(self, doc_item: DocItem) -> str:
         """
         Get result markdown of `doc_item`.

@@ -96,7 +96,7 @@ class CachedParser:
             return symbol

         if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None:
-            async with client_session.get(doc_item.url) as response:
+            async with bot_instance.http_session.get(doc_item.url) as response:
                 soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml")

             self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue)
@@ -202,7 +202,7 @@ class DocCog(commands.Cog):
         Return True on success; False if fetching failed and was rescheduled.
         """
         self.base_urls[api_package_name] = base_url
-        package = await fetch_inventory(self.bot.http_session, inventory_url)
+        package = await fetch_inventory(inventory_url)

         if not package:
             delay = 2*60 if inventory_url not in self.scheduled_inventories else 5*60
@@ -210,7 +210,7 @@ class DocCog(commands.Cog):
             self.inventory_scheduler.schedule_later(
                 delay,
                 api_package_name,
-                fetch_inventory(self.bot.http_session, inventory_url)
+                fetch_inventory(inventory_url)
             )
             self.scheduled_inventories.add(api_package_name)
             return False
@@ -302,7 +302,7 @@ class DocCog(commands.Cog):
         markdown = await doc_cache.get(symbol_info)
         if markdown is None:
             log.debug(f"Redis cache miss for symbol `{symbol}`.")
-            markdown = await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info)
+            markdown = await self.item_fetcher.get_markdown(symbol_info)
             if markdown is not None:
                 await doc_cache.set(symbol_info, markdown)
             else:
diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py
index 96df08786..0d9bd726a 100644
--- a/bot/exts/info/doc/_inventory_parser.py
+++ b/bot/exts/info/doc/_inventory_parser.py
@@ -6,6 +6,8 @@ from typing import AsyncIterator, DefaultDict, List, Optional, Tuple

 import aiohttp

+import bot
+
 log = logging.getLogger(__name__)

 FAILED_REQUEST_ATTEMPTS = 3
@@ -69,10 +71,10 @@ async def _load_v2(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[
     return invdata


-async def _fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> DefaultDict[str, List[Tuple[str, str]]]:
+async def _fetch_inventory(url: str) -> DefaultDict[str, List[Tuple[str, str]]]:
     """Fetch, parse and return an intersphinx inventory file from an url."""
     timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5)
-    async with client_session.get(url, timeout=timeout, raise_for_status=True) as response:
+    async with bot.instance.http_session.get(url, timeout=timeout, raise_for_status=True) as response:
         stream = response.content

         inventory_header = (await stream.readline()).decode().rstrip()
@@ -91,14 +93,11 @@ async def _fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> D
     raise ValueError(f"Invalid inventory file at url {url}.")


-async def fetch_inventory(
-    client_session: aiohttp.ClientSession,
-    url: str
-) -> Optional[DefaultDict[str, List[Tuple[str, str]]]]:
+async def fetch_inventory(url: str) -> Optional[DefaultDict[str, List[Tuple[str, str]]]]:
     """Get inventory from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors."""
     for attempt in range(1, FAILED_REQUEST_ATTEMPTS+1):
         try:
-            inventory = await _fetch_inventory(client_session, url)
+            inventory = await _fetch_inventory(url)
         except aiohttp.ClientConnectorError:
             log.warning(
                 f"Failed to connect to inventory url at {url}; "