diff options
| author | 2020-11-14 22:59:50 +0100 | |
|---|---|---|
| committer | 2020-11-15 03:12:58 +0100 | |
| commit | b118f4cf38bdf99cf66e822c5b2280aff879123d (patch) | |
| tree | 9946b8b36d1356e8c3551b697b060946d12dcfa3 | |
| parent | Limit newlines in doc descriptions (diff) | |
Rework the doc redis cache to work with hashes
This rework allows us to delete package caches easily by
deleting the package hash instead of having to pattern match all
keys and delete them.
The interface was also updated to accept DocItems instead of requiring
callers to construct the keys.
| -rw-r--r-- | bot/exts/info/doc/_cog.py | 11 | ||||
| -rw-r--r-- | bot/exts/info/doc/_redis_cache.py | 57 | 
2 files changed, 56 insertions, 12 deletions
diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ecc648d89..67a21ed72 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -4,7 +4,6 @@ import asyncio  import logging  import re  import sys -import urllib.parse  from collections import defaultdict  from contextlib import suppress  from typing import Dict, List, NamedTuple, Optional, Union @@ -175,6 +174,7 @@ class DocCog(commands.Cog):          self.scheduled_inventories = set()          self.bot.loop.create_task(self.init_refresh_inventory()) +        self.bot.loop.create_task(self.doc_cache.delete_expired())      async def init_refresh_inventory(self) -> None:          """Refresh documentation inventory on cog initialization.""" @@ -292,21 +292,18 @@ class DocCog(commands.Cog):              return None          self.bot.stats.incr(f"doc_fetches.{symbol_info.package.lower()}") -        item_url = f"{symbol_info.url}#{symbol_info.symbol_id}" -        redis_key = "".join(urllib.parse.urlparse(item_url)[1:])  # url without scheme - -        markdown = await self.doc_cache.get(redis_key) +        markdown = await self.doc_cache.get(symbol_info)          if markdown is None:              log.debug(f"Redis cache miss for symbol `{symbol}`.")              markdown = await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info)              if markdown is not None: -                await self.doc_cache.set(redis_key, markdown) +                await self.doc_cache.set(symbol_info, markdown)              else:                  markdown = "Unable to parse the requested symbol."          embed = discord.Embed(              title=discord.utils.escape_markdown(symbol), -            url=item_url, +            url=f"{symbol_info.url}#{symbol_info.symbol_id}",              description=markdown          )          # Show all symbols with the same name that were renamed in the footer. 
diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index 147394ba6..c617eba49 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -1,23 +1,70 @@ -from typing import Optional +from __future__ import annotations + +import datetime +import pickle +from typing import Optional, TYPE_CHECKING  from async_rediscache.types.base import RedisObject, namespace_lock +if TYPE_CHECKING: +    from ._cog import DocItem  class DocRedisCache(RedisObject):      """Interface for redis functionality needed by the Doc cog."""      @namespace_lock -    async def set(self, key: str, value: str) -> None: +    async def set(self, item: DocItem, value: str) -> None:          """          Set markdown `value` for `key`.          Keys expire after a week to keep data up to date.          """ +        expiry_timestamp = datetime.datetime.now().timestamp() + 7 * 24 * 60 * 60          with await self._get_pool_connection() as connection: -            await connection.setex(f"{self.namespace}:{key}", 7*24*60*60, value) +            await connection.hset( +                f"{self.namespace}:{item.package}", +                self.get_item_key(item), +                pickle.dumps((value, expiry_timestamp)) +            )      @namespace_lock -    async def get(self, key: str) -> Optional[str]: +    async def get(self, item: DocItem) -> Optional[str]:          """Get markdown contents for `key`."""          with await self._get_pool_connection() as connection: -            return await connection.get(f"{self.namespace}:{key}", encoding="utf8") +            cached_value = await connection.hget(f"{self.namespace}:{item.package}", self.get_item_key(item)) +            if cached_value is None: +                return None + +            value, expire = pickle.loads(cached_value) +            if expire <= datetime.datetime.now().timestamp(): +                await connection.hdel(f"{self.namespace}:{item.package}", self.get_item_key(item)) +                return None + +            return value + +    @namespace_lock +    async def delete(self, package: str) -> None: +        """Remove all values for `package`.""" +        with await self._get_pool_connection() as connection: +            await connection.delete(f"{self.namespace}:{package}") + +    @namespace_lock +    async def delete_expired(self) -> None: +        """Delete all expired keys.""" +        current_timestamp = datetime.datetime.now().timestamp() +        with await self._get_pool_connection() as connection: +            async for package_key in connection.iscan(match=f"{self.namespace}*"): +                expired_fields = [] + +                for field, cached_value in (await connection.hgetall(package_key)).items(): +                    _, expire = pickle.loads(cached_value) +                    if expire <= current_timestamp: +                        expired_fields.append(field) + +                if expired_fields: +                    await connection.hdel(package_key, *expired_fields) + +    @staticmethod +    def get_item_key(item: DocItem) -> str: +        """Create redis key for `item`.""" +        return item.relative_url_path + item.symbol_id  |