aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/docs.yaml2
-rw-r--r--CHANGELOG.md23
-rw-r--r--botcore/__init__.py9
-rw-r--r--botcore/exts/__init__.py4
-rw-r--r--botcore/utils/__init__.py15
-rw-r--r--botcore/utils/caching.py65
-rw-r--r--botcore/utils/channel.py54
-rw-r--r--botcore/utils/extensions.py52
-rw-r--r--botcore/utils/logging.py45
-rw-r--r--botcore/utils/members.py56
-rw-r--r--botcore/utils/regex.py (renamed from botcore/regex.py)0
-rw-r--r--botcore/utils/scheduling.py248
-rw-r--r--docs/_static/changelog.css11
-rw-r--r--docs/_static/changelog.js37
-rw-r--r--docs/changelog.rst14
-rw-r--r--docs/conf.py125
-rw-r--r--docs/index.rst6
-rw-r--r--docs/utils.py117
-rw-r--r--poetry.lock30
-rw-r--r--pyproject.toml3
-rw-r--r--tox.ini2
21 files changed, 706 insertions, 212 deletions
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 8018d63c..a01ea58f 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -31,7 +31,7 @@ jobs:
- run: pip install six
- name: Generate AutoDoc References
- run: sphinx-apidoc -o docs/output botcore -fe
+ run: sphinx-apidoc -o docs/output botcore -feM
- name: Generate HTML Site
run: sphinx-build -nW -j auto -b html docs docs/build
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..52a4acba
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,23 @@
+# Changelog
+
+## 2.0.0 22nd February 2022
+- Breaking: Moved regex to botcore.utils namespace
+- Feature: Port many common utilities from our bots
+ - caching
+ - channel
+ - extensions
+ - loggers
+ - members
+ - scheduling
+- Support: Added intersphinx to docs.
+
+## 1.2.0 9th January 2022
+- Feature: Code block detection regex
+
+## 1.1.0 2nd December 2021
+- Support: Autogenerated docs.
+- Feature: Regex utility.
+
+
+## 1.0.0 17th November 2021
+- Support: Core package, poetry, and linting CI.
diff --git a/botcore/__init__.py b/botcore/__init__.py
index c582d0df..d910f393 100644
--- a/botcore/__init__.py
+++ b/botcore/__init__.py
@@ -1,9 +1,10 @@
-from botcore import (
- regex,
-)
+"""Useful utilities and tools for discord bot development."""
+
+from botcore import exts, utils
__all__ = [
- regex,
+ exts,
+ utils,
]
__all__ = list(map(lambda module: module.__name__, __all__))
diff --git a/botcore/exts/__init__.py b/botcore/exts/__init__.py
new file mode 100644
index 00000000..029178a9
--- /dev/null
+++ b/botcore/exts/__init__.py
@@ -0,0 +1,4 @@
+"""Reusable discord cogs."""
+__all__ = []
+
+__all__ = list(map(lambda module: module.__name__, __all__))
diff --git a/botcore/utils/__init__.py b/botcore/utils/__init__.py
new file mode 100644
index 00000000..71354334
--- /dev/null
+++ b/botcore/utils/__init__.py
@@ -0,0 +1,15 @@
+"""Useful utilities and tools for discord bot development."""
+
+from botcore.utils import (caching, channel, extensions, logging, members, regex, scheduling)
+
+__all__ = [
+ caching,
+ channel,
+ extensions,
+ logging,
+ members,
+ regex,
+ scheduling,
+]
+
+__all__ = list(map(lambda module: module.__name__, __all__))
diff --git a/botcore/utils/caching.py b/botcore/utils/caching.py
new file mode 100644
index 00000000..ac34bb9b
--- /dev/null
+++ b/botcore/utils/caching.py
@@ -0,0 +1,65 @@
+"""Utilities related to custom caches."""
+
+import functools
+import typing
+from collections import OrderedDict
+
+
+class AsyncCache:
+ """
+ LRU cache implementation for coroutines.
+
+ Once the cache exceeds the maximum size, keys are deleted in FIFO order.
+
+ An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key.
+ """
+
+ def __init__(self, max_size: int = 128):
+ """
+ Initialise a new :obj:`AsyncCache` instance.
+
+ Args:
+ max_size: How many items to store in the cache.
+ """
+ self._cache = OrderedDict()
+ self._max_size = max_size
+
+ def __call__(self, arg_offset: int = 0) -> typing.Callable:
+ """
+ Decorator for async cache.
+
+ Args:
+ arg_offset: The offset for the position of the key argument.
+
+ Returns:
+ A decorator to wrap the target function.
+ """
+
+ def decorator(function: typing.Callable) -> typing.Callable:
+ """
+ Define the async cache decorator.
+
+ Args:
+ function: The function to wrap.
+
+ Returns:
+ The wrapped function.
+ """
+
+ @functools.wraps(function)
+ async def wrapper(*args) -> typing.Any:
+ """Decorator wrapper for the caching logic."""
+ key = args[arg_offset:]
+
+ if key not in self._cache:
+ if len(self._cache) > self._max_size:
+ self._cache.popitem(last=False)
+
+ self._cache[key] = await function(*args)
+ return self._cache[key]
+ return wrapper
+ return decorator
+
+ def clear(self) -> None:
+ """Clear cache instance."""
+ self._cache.clear()
diff --git a/botcore/utils/channel.py b/botcore/utils/channel.py
new file mode 100644
index 00000000..17e70a2a
--- /dev/null
+++ b/botcore/utils/channel.py
@@ -0,0 +1,54 @@
+"""Useful helper functions for interacting with various discord.py channel objects."""
+
+import discord
+from discord.ext.commands import Bot
+
+from botcore.utils import logging
+
+log = logging.get_logger(__name__)
+
+
+def is_in_category(channel: discord.TextChannel, category_id: int) -> bool:
+ """
+ Return whether the given ``channel`` is in the category with the id ``category_id``.
+
+ Args:
+ channel: The channel to check.
+ category_id: The category to check for.
+
+ Returns:
+ A bool depending on whether the channel is in the category.
+ """
+ return getattr(channel, "category_id", None) == category_id
+
+
+async def get_or_fetch_channel(bot: Bot, channel_id: int) -> discord.abc.GuildChannel:
+ """
+ Attempt to get or fetch the given ``channel_id`` from the bot's cache, and return it.
+
+ Args:
+ bot: The :obj:`discord.ext.commands.Bot` instance to use for getting/fetching.
+ channel_id: The channel to get/fetch.
+
+ Raises:
+ :exc:`discord.InvalidData`
+ An unknown channel type was received from Discord.
+ :exc:`discord.HTTPException`
+ Retrieving the channel failed.
+ :exc:`discord.NotFound`
+ Invalid Channel ID.
+ :exc:`discord.Forbidden`
+ You do not have permission to fetch this channel.
+
+ Returns:
+ The channel from the ID.
+ """
+ log.trace(f"Getting the channel {channel_id}.")
+
+ channel = bot.get_channel(channel_id)
+ if not channel:
+ log.debug(f"Channel {channel_id} is not in cache; fetching from API.")
+ channel = await bot.fetch_channel(channel_id)
+
+ log.trace(f"Channel #{channel} ({channel_id}) retrieved.")
+ return channel
diff --git a/botcore/utils/extensions.py b/botcore/utils/extensions.py
new file mode 100644
index 00000000..3f8d6e6d
--- /dev/null
+++ b/botcore/utils/extensions.py
@@ -0,0 +1,52 @@
+"""Utilities for loading discord extensions."""
+
+import importlib
+import inspect
+import pkgutil
+import types
+from typing import NoReturn
+
+
+def unqualify(name: str) -> str:
+ """
+ Return an unqualified name given a qualified module/package ``name``.
+
+ Args:
+ name: The module name to unqualify.
+
+ Returns:
+ The unqualified module name.
+ """
+ return name.rsplit(".", maxsplit=1)[-1]
+
+
+def walk_extensions(module: types.ModuleType) -> frozenset[str]:
+ """
+ Yield extension names from the given module.
+
+ Args:
+ module (types.ModuleType): The module to look for extensions in.
+
+ Returns:
+ A set of strings that can be passed directly to :obj:`discord.ext.commands.Bot.load_extension`.
+ """
+
+ def on_error(name: str) -> NoReturn:
+ raise ImportError(name=name) # pragma: no cover
+
+ modules = set()
+
+ for module_info in pkgutil.walk_packages(module.__path__, f"{module.__name__}.", onerror=on_error):
+ if unqualify(module_info.name).startswith("_"):
+ # Ignore module/package names starting with an underscore.
+ continue
+
+ if module_info.ispkg:
+ imported = importlib.import_module(module_info.name)
+ if not inspect.isfunction(getattr(imported, "setup", None)):
+ # If it lacks a setup function, it's not an extension.
+ continue
+
+ modules.add(module_info.name)
+
+ return frozenset(modules)
diff --git a/botcore/utils/logging.py b/botcore/utils/logging.py
new file mode 100644
index 00000000..71ce4e66
--- /dev/null
+++ b/botcore/utils/logging.py
@@ -0,0 +1,45 @@
+"""Common logging related functions."""
+
+import logging
+import typing
+
+if typing.TYPE_CHECKING:
+ LoggerClass = logging.Logger
+else:
+ LoggerClass = logging.getLoggerClass()
+
+TRACE_LEVEL = 5
+
+
+class CustomLogger(LoggerClass):
+ """Custom implementation of the :obj:`logging.Logger` class with an added :obj:`trace` method."""
+
+ def trace(self, msg: str, *args, **kwargs) -> None:
+ """
+ Log the given message with the severity ``"TRACE"``.
+
+ To pass exception information, use the keyword argument exc_info with a true value:
+
+ .. code-block:: py
+
+ logger.trace("Houston, we have an %s", "interesting problem", exc_info=1)
+
+ Args:
+ msg: The message to be logged.
+ args, kwargs: Passed to the base log function as is.
+ """
+ if self.isEnabledFor(TRACE_LEVEL):
+ self.log(TRACE_LEVEL, msg, *args, **kwargs)
+
+
+def get_logger(name: typing.Optional[str] = None) -> CustomLogger:
+ """
+ Utility to make mypy recognise that logger is of type :obj:`CustomLogger`.
+
+ Args:
+ name: The name given to the logger.
+
+ Returns:
+ An instance of the :obj:`CustomLogger` class.
+ """
+ return typing.cast(CustomLogger, logging.getLogger(name))
diff --git a/botcore/utils/members.py b/botcore/utils/members.py
new file mode 100644
index 00000000..e89b4618
--- /dev/null
+++ b/botcore/utils/members.py
@@ -0,0 +1,56 @@
+"""Useful helper functions for interacting with :obj:`discord.Member` objects."""
+
+import typing
+
+import discord
+
+from botcore.utils import logging
+
+log = logging.get_logger(__name__)
+
+
+async def get_or_fetch_member(guild: discord.Guild, member_id: int) -> typing.Optional[discord.Member]:
+ """
+ Attempt to get a member from cache; on failure fetch from the API.
+
+ Returns:
+ The :obj:`discord.Member` or :obj:`None` to indicate the member could not be found.
+ """
+ if member := guild.get_member(member_id):
+ log.trace(f"{member} retrieved from cache.")
+ else:
+ try:
+ member = await guild.fetch_member(member_id)
+ except discord.errors.NotFound:
+ log.trace(f"Failed to fetch {member_id} from API.")
+ return None
+ log.trace(f"{member} fetched from API.")
+ return member
+
+
+async def handle_role_change(
+ member: discord.Member,
+ coro: typing.Callable[..., typing.Coroutine],
+ role: discord.Role
+) -> None:
+ """
+ Await the given ``coro`` with ``member`` as the sole argument.
+
+ Handle errors that we expect to be raised from
+ :obj:`discord.Member.add_roles` and :obj:`discord.Member.remove_roles`.
+
+ Args:
+ member: The member to pass to ``coro``.
+ coro: This is intended to be :obj:`discord.Member.add_roles` or :obj:`discord.Member.remove_roles`.
+ """
+ try:
+ await coro(role)
+ except discord.NotFound:
+ log.error(f"Failed to change role for {member} ({member.id}): member not found")
+ except discord.Forbidden:
+ log.error(
+ f"Forbidden to change role for {member} ({member.id}); "
+ f"possibly due to role hierarchy"
+ )
+ except discord.HTTPException as e:
+ log.error(f"Failed to change role for {member} ({member.id}): {e.status} {e.code}")
diff --git a/botcore/regex.py b/botcore/utils/regex.py
index 036a5113..036a5113 100644
--- a/botcore/regex.py
+++ b/botcore/utils/regex.py
diff --git a/botcore/utils/scheduling.py b/botcore/utils/scheduling.py
new file mode 100644
index 00000000..164f6b10
--- /dev/null
+++ b/botcore/utils/scheduling.py
@@ -0,0 +1,248 @@
+"""Generic python scheduler."""
+
+import asyncio
+import contextlib
+import inspect
+import typing
+from datetime import datetime
+from functools import partial
+
+from botcore.utils import logging
+
+
+class Scheduler:
+ """
+ Schedule the execution of coroutines and keep track of them.
+
+ When instantiating a :obj:`Scheduler`, a name must be provided. This name is used to distinguish the
+ instance's log messages from other instances. Using the name of the class or module containing
+ the instance is suggested.
+
+ Coroutines can be scheduled immediately with :obj:`schedule` or in the future with :obj:`schedule_at`
+ or :obj:`schedule_later`. A unique ID is required to be given in order to keep track of the
+ resulting Tasks. Any scheduled task can be cancelled prematurely using :obj:`cancel` by providing
+ the same ID used to schedule it.
+
+ The ``in`` operator is supported for checking if a task with a given ID is currently scheduled.
+
+ Any exception raised in a scheduled task is logged when the task is done.
+ """
+
+ def __init__(self, name: str):
+ """
+ Initialize a new :obj:`Scheduler` instance.
+
+ Args:
+ name: The name of the :obj:`Scheduler`. Used in logging, and namespacing.
+ """
+ self.name = name
+
+ self._log = logging.get_logger(f"{__name__}.{name}")
+ self._scheduled_tasks: typing.Dict[typing.Hashable, asyncio.Task] = {}
+
+ def __contains__(self, task_id: typing.Hashable) -> bool:
+ """
+ Return :obj:`True` if a task with the given ``task_id`` is currently scheduled.
+
+ Args:
+ task_id: The task to look for.
+
+ Returns:
+ :obj:`True` if the task was found.
+ """
+ return task_id in self._scheduled_tasks
+
+ def schedule(self, task_id: typing.Hashable, coroutine: typing.Coroutine) -> None:
+ """
+ Schedule the execution of a ``coroutine``.
+
+ If a task with ``task_id`` already exists, close ``coroutine`` instead of scheduling it. This
+ prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.
+
+ Args:
+ task_id: A unique ID to create the task with.
+ coroutine: The function to be called.
+ """
+ self._log.trace(f"Scheduling task #{task_id}...")
+
+ msg = f"Cannot schedule an already started coroutine for #{task_id}"
+ assert inspect.getcoroutinestate(coroutine) == "CORO_CREATED", msg
+
+ if task_id in self._scheduled_tasks:
+ self._log.debug(f"Did not schedule task #{task_id}; task was already scheduled.")
+ coroutine.close()
+ return
+
+ task = asyncio.create_task(coroutine, name=f"{self.name}_{task_id}")
+ task.add_done_callback(partial(self._task_done_callback, task_id))
+
+ self._scheduled_tasks[task_id] = task
+ self._log.debug(f"Scheduled task #{task_id} {id(task)}.")
+
+ def schedule_at(self, time: datetime, task_id: typing.Hashable, coroutine: typing.Coroutine) -> None:
+ """
+ Schedule ``coroutine`` to be executed at the given ``time``.
+
+ If ``time`` is timezone aware, then use that timezone to calculate now() when subtracting.
+ If ``time`` is naïve, then use UTC.
+
+ If ``time`` is in the past, schedule ``coroutine`` immediately.
+
+ If a task with ``task_id`` already exists, close ``coroutine`` instead of scheduling it. This
+ prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.
+
+ Args:
+ time: The time to start the task.
+ task_id: A unique ID to create the task with.
+ coroutine: The function to be called.
+ """
+ now_datetime = datetime.now(time.tzinfo) if time.tzinfo else datetime.utcnow()
+ delay = (time - now_datetime).total_seconds()
+ if delay > 0:
+ coroutine = self._await_later(delay, task_id, coroutine)
+
+ self.schedule(task_id, coroutine)
+
+ def schedule_later(
+ self,
+ delay: typing.Union[int, float],
+ task_id: typing.Hashable,
+ coroutine: typing.Coroutine
+ ) -> None:
+ """
+ Schedule ``coroutine`` to be executed after ``delay`` seconds.
+
+ If a task with ``task_id`` already exists, close ``coroutine`` instead of scheduling it. This
+ prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.
+
+ Args:
+ delay: How long to wait before starting the task.
+ task_id: A unique ID to create the task with.
+ coroutine: The function to be called.
+ """
+ self.schedule(task_id, self._await_later(delay, task_id, coroutine))
+
+ def cancel(self, task_id: typing.Hashable) -> None:
+ """
+ Unschedule the task identified by ``task_id``. Log a warning if the task doesn't exist.
+
+ Args:
+ task_id: The task's unique ID.
+ """
+ self._log.trace(f"Cancelling task #{task_id}...")
+
+ try:
+ task = self._scheduled_tasks.pop(task_id)
+ except KeyError:
+ self._log.warning(f"Failed to unschedule {task_id} (no task found).")
+ else:
+ task.cancel()
+
+ self._log.debug(f"Unscheduled task #{task_id} {id(task)}.")
+
+ def cancel_all(self) -> None:
+ """Unschedule all known tasks."""
+ self._log.debug("Unscheduling all tasks")
+
+ for task_id in self._scheduled_tasks.copy():
+ self.cancel(task_id)
+
+ async def _await_later(
+ self,
+ delay: typing.Union[int, float],
+ task_id: typing.Hashable,
+ coroutine: typing.Coroutine
+ ) -> None:
+ """Await ``coroutine`` after ``delay`` seconds."""
+ try:
+ self._log.trace(f"Waiting {delay} seconds before awaiting coroutine for #{task_id}.")
+ await asyncio.sleep(delay)
+
+ # Use asyncio.shield to prevent the coroutine from cancelling itself.
+ self._log.trace(f"Done waiting for #{task_id}; now awaiting the coroutine.")
+ await asyncio.shield(coroutine)
+ finally:
+ # Close it to prevent unawaited coroutine warnings,
+ # which would happen if the task was cancelled during the sleep.
+ # Only close it if it's not been awaited yet. This check is important because the
+ # coroutine may cancel this task, which would also trigger the finally block.
+ state = inspect.getcoroutinestate(coroutine)
+ if state == "CORO_CREATED":
+ self._log.debug(f"Explicitly closing the coroutine for #{task_id}.")
+ coroutine.close()
+ else:
+ self._log.debug(f"Finally block reached for #{task_id}; {state=}")
+
+ def _task_done_callback(self, task_id: typing.Hashable, done_task: asyncio.Task) -> None:
+ """
+ Delete the task and raise its exception if one exists.
+
+ If ``done_task`` and the task associated with ``task_id`` are different, then the latter
+ will not be deleted. In this case, a new task was likely rescheduled with the same ID.
+ """
+ self._log.trace(f"Performing done callback for task #{task_id} {id(done_task)}.")
+
+ scheduled_task = self._scheduled_tasks.get(task_id)
+
+ if scheduled_task and done_task is scheduled_task:
+ # A task for the ID exists and is the same as the done task.
+ # Since this is the done callback, the task is already done so no need to cancel it.
+ self._log.trace(f"Deleting task #{task_id} {id(done_task)}.")
+ del self._scheduled_tasks[task_id]
+ elif scheduled_task:
+ # A new task was likely rescheduled with the same ID.
+ self._log.debug(
+ f"The scheduled task #{task_id} {id(scheduled_task)} "
+ f"and the done task {id(done_task)} differ."
+ )
+ elif not done_task.cancelled():
+ self._log.warning(
+ f"Task #{task_id} not found while handling task {id(done_task)}! "
+ f"A task somehow got unscheduled improperly (i.e. deleted but not cancelled)."
+ )
+
+ with contextlib.suppress(asyncio.CancelledError):
+ exception = done_task.exception()
+ # Log the exception if one exists.
+ if exception:
+ self._log.error(f"Error in task #{task_id} {id(done_task)}!", exc_info=exception)
+
+
+def create_task(
+ coro: typing.Awaitable,
+ *,
+ suppressed_exceptions: tuple[typing.Type[Exception]] = (),
+ event_loop: typing.Optional[asyncio.AbstractEventLoop] = None,
+ **kwargs,
+) -> asyncio.Task:
+ """
+ Wrapper for creating an :obj:`asyncio.Task` which logs exceptions raised in the task.
+
+ If the ``event_loop`` kwarg is provided, the task is created from that event loop,
+ otherwise the running loop is used.
+
+ Args:
+ coro: The function to call.
+ suppressed_exceptions: Exceptions to be handled by the task.
+ event_loop (:obj:`asyncio.AbstractEventLoop`): The loop to create the task from.
+ kwargs: Passed to :py:func:`asyncio.create_task`.
+
+ Returns:
+ asyncio.Task: The wrapped task.
+ """
+ if event_loop is not None:
+ task = event_loop.create_task(coro, **kwargs)
+ else:
+ task = asyncio.create_task(coro, **kwargs)
+ task.add_done_callback(partial(_log_task_exception, suppressed_exceptions=suppressed_exceptions))
+ return task
+
+
+def _log_task_exception(task: asyncio.Task, *, suppressed_exceptions: typing.Tuple[typing.Type[Exception]]) -> None:
+ """Retrieve and log the exception raised in ``task`` if one exists."""
+ with contextlib.suppress(asyncio.CancelledError):
+ exception = task.exception()
+ # Log the exception if one exists.
+ if exception and not isinstance(exception, suppressed_exceptions):
+ log = logging.get_logger(__name__)
+ log.error(f"Error in task {task.get_name()} {id(task)}!", exc_info=exception)
diff --git a/docs/_static/changelog.css b/docs/_static/changelog.css
deleted file mode 100644
index 343792a1..00000000
--- a/docs/_static/changelog.css
+++ /dev/null
@@ -1,11 +0,0 @@
-[data-theme='dark'] #changelog .dark,
-[data-theme='light'] #changelog .light,
-[data-theme='auto'] #changelog .light {
- display: inline;
-}
-
-[data-theme='dark'] #changelog .light,
-[data-theme='light'] #changelog .dark,
-[data-theme='auto'] #changelog .dark {
- display: none;
-}
diff --git a/docs/_static/changelog.js b/docs/_static/changelog.js
deleted file mode 100644
index f72d025c..00000000
--- a/docs/_static/changelog.js
+++ /dev/null
@@ -1,37 +0,0 @@
-/** Update the changelog colors in dark mode */
-
-const changelog = document.getElementById("changelog");
-
-function updateEntryColor(entry) {
- const line = entry.lastChild;
- const lightColorSpan = line.childNodes.item(1);
- const darkColorSpan = lightColorSpan.cloneNode(true);
-
- line.insertBefore(darkColorSpan, lightColorSpan);
-
- lightColorSpan.classList.add("light");
- darkColorSpan.classList.add("dark");
-
- let color;
- switch (darkColorSpan.textContent) {
- case "Feature":
- color = "#5BF38E";
- break;
- case "Support":
- color = "#55A5E7";
- break;
- case "Bug":
- color = "#E14F4F";
- break;
- default:
- color = null;
- }
-
- darkColorSpan.style["color"] = color;
-}
-
-if (changelog !== null) {
- for (let collection of changelog.getElementsByClassName("simple")) {
- Array.from(collection.children).forEach(updateEntryColor);
- }
-}
diff --git a/docs/changelog.rst b/docs/changelog.rst
deleted file mode 100644
index 743fcc20..00000000
--- a/docs/changelog.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-.. See docs for details on formatting your entries
- https://releases.readthedocs.io/en/latest/concepts.html
-
-
-Changelog
-=========
-
-- :release:`1.2.0 <9th January 2022>`
-- :feature:`12` Code block detection regex
-- :release:`1.1.0 <2nd December 2021>`
-- :support:`2` Autogenerated docs.
-- :feature:`2` Regex utility.
-- :release:`1.0.0 <17th November 2021>`
-- :support:`1` Core package, poetry, and linting CI.
diff --git a/docs/conf.py b/docs/conf.py
index 4ab831d3..476a4d36 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,17 +1,20 @@
# Configuration file for the Sphinx documentation builder.
# https://www.sphinx-doc.org/en/master/usage/configuration.html
-import ast
-import importlib
-import inspect
+import functools
+import os.path
import sys
-import typing
from pathlib import Path
import git
import tomli
from sphinx.application import Sphinx
+# Handle the path not being set correctly in actions.
+sys.path.insert(0, os.path.abspath('..'))
+
+from docs import utils # noqa: E402
+
# -- Project information -----------------------------------------------------
project = "Bot Core"
@@ -38,11 +41,11 @@ add_module_names = False
# ones.
extensions = [
"sphinx.ext.extlinks",
+ "sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.todo",
"sphinx.ext.napoleon",
"sphinx_autodoc_typehints",
- "releases",
"sphinx.ext.linkcode",
"sphinx.ext.githubpages",
]
@@ -80,52 +83,10 @@ html_logo = "https://raw.githubusercontent.com/python-discord/branding/main/logo
html_favicon = html_logo
html_css_files = [
- "changelog.css",
"logo.css",
]
-html_js_files = [
- "changelog.js",
-]
-
-
-# -- Autodoc cleanup ---------------------------------------------------------
-# Clean up the output generated by autodoc to produce a nicer documentation tree
-# This is kept in a function to avoid polluting the namespace
-def __cleanup() -> None:
- for file in (PROJECT_ROOT / "docs" / "output").iterdir():
- if file.name == "modules.rst":
- # We only have one module, so this is redundant
- # Remove it and flatten out the tree
- file.unlink()
-
- elif file.name == "botcore.rst":
- # We want to bring the submodule name to the top, and remove anything that's not a submodule
- result = ""
- for line in file.read_text(encoding="utf-8").splitlines(keepends=True):
- if ".." not in line and result == "":
- # We have not reached the first submodule, this is all filler
- continue
- elif "Module contents" in line:
- # We have parsed all the submodules, so let's skip the redudant module name
- break
- result += line
-
- result = "Botcore\n=======\n\n" + result
- file.write_text(result, encoding="utf-8")
-
- else:
- # Clean up the submodule name so it's just the name without the top level module name
- # example: `botcore.regex module` -> `regex`
- lines = file.read_text(encoding="utf-8").splitlines()
- lines[0] = lines[0].replace("botcore.", "").replace("module", "").strip()
-
- # Take the opportunity to configure autodoc
- lines = "\n".join(lines).replace("undoc-members", "special-members")
- file.write_text(lines, encoding="utf-8")
-
-
-__cleanup()
+utils.cleanup()
def skip(*args) -> bool:
@@ -159,70 +120,18 @@ napoleon_numpy_docstring = False
napoleon_attr_annotations = True
-# -- Options for releases extension ------------------------------------------
-releases_github_path = REPO_LINK.removeprefix("https://github.com/")
-
-
# -- Options for extlinks extension ------------------------------------------
extlinks = {
"repo-file": (f"{REPO_LINK}/blob/main/%s", "repo-file %s")
}
+# -- Options for intersphinx extension ---------------------------------------
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3", None),
+ "discord": ("https://discordpy.readthedocs.io/en/master/", None),
+}
+
+
# -- Options for the linkcode extension --------------------------------------
-def linkcode_resolve(domain: str, info: dict[str, str]) -> typing.Optional[str]:
- """
- Function called by linkcode to get the URL for a given resource.
-
- See for more details:
- https://www.sphinx-doc.org/en/master/usage/extensions/linkcode.html#confval-linkcode_resolve
- """
- if domain != "py":
- raise Exception("Unknown domain passed to linkcode function.")
-
- symbol_name = info["fullname"]
-
- module = importlib.import_module(info["module"])
-
- symbol = [module]
- for name in symbol_name.split("."):
- symbol.append(getattr(symbol[-1], name))
- symbol_name = name
-
- try:
- lines, start = inspect.getsourcelines(symbol[-1])
- end = start + len(lines)
- except TypeError:
- # Find variables by parsing the ast
- source = ast.parse(inspect.getsource(symbol[-2]))
- while isinstance(source.body[0], ast.ClassDef):
- source = source.body[0]
-
- for ast_obj in source.body:
- if isinstance(ast_obj, ast.Assign):
- names = []
- for target in ast_obj.targets:
- if isinstance(target, ast.Tuple):
- names.extend([name.id for name in target.elts])
- else:
- names.append(target.id)
-
- if symbol_name in names:
- start, end = ast_obj.lineno, ast_obj.end_lineno
- break
- else:
- raise Exception(f"Could not find symbol `{symbol_name}` in {module.__name__}.")
-
- _, offset = inspect.getsourcelines(symbol[-2])
- if offset != 0:
- offset -= 1
- start += offset
- end += offset
-
- file = Path(inspect.getfile(module)).relative_to(PROJECT_ROOT).as_posix()
-
- url = f"{SOURCE_FILE_LINK}/{file}#L{start}"
- if end != start:
- url += f"-L{end}"
-
- return url
+linkcode_resolve = functools.partial(utils.linkcode_resolve, SOURCE_FILE_LINK)
diff --git a/docs/index.rst b/docs/index.rst
index e7c25ef1..81975f35 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -13,11 +13,6 @@ Reference
output/botcore
-.. toctree::
- :caption: Other:
-
- changelog
-
Extras
==================
@@ -25,3 +20,4 @@ Extras
* :ref:`genindex`
* :ref:`search`
* :repo-file:`Information <docs/README.md>`
+* :repo-file:`Changelog <CHANGELOG.md>`
diff --git a/docs/utils.py b/docs/utils.py
new file mode 100644
index 00000000..76b3e098
--- /dev/null
+++ b/docs/utils.py
@@ -0,0 +1,117 @@
+"""Utilities used in generating docs."""
+
+import ast
+import importlib
+import inspect
+import typing
+from pathlib import Path
+
+PROJECT_ROOT = Path(__file__).parent.parent
+
+
+def linkcode_resolve(source_url: str, domain: str, info: dict[str, str]) -> typing.Optional[str]:
+ """
+ Function called by linkcode to get the URL for a given resource.
+
+ See for more details:
+ https://www.sphinx-doc.org/en/master/usage/extensions/linkcode.html#confval-linkcode_resolve
+ """
+ if domain != "py":
+ raise Exception("Unknown domain passed to linkcode function.")
+
+ symbol_name = info["fullname"]
+
+ module = importlib.import_module(info["module"])
+
+ symbol = [module]
+ for name in symbol_name.split("."):
+ symbol.append(getattr(symbol[-1], name))
+ symbol_name = name
+
+ try:
+ lines, start = inspect.getsourcelines(symbol[-1])
+ end = start + len(lines)
+ except TypeError:
+ # Find variables by parsing the ast
+ source = ast.parse(inspect.getsource(symbol[-2]))
+ while isinstance(source.body[0], ast.ClassDef):
+ source = source.body[0]
+
+ for ast_obj in source.body:
+ if isinstance(ast_obj, ast.Assign):
+ names = []
+ for target in ast_obj.targets:
+ if isinstance(target, ast.Tuple):
+ names.extend([name.id for name in target.elts])
+ else:
+ names.append(target.id)
+
+ if symbol_name in names:
+ start, end = ast_obj.lineno, ast_obj.end_lineno
+ break
+ else:
+ raise Exception(f"Could not find symbol `{symbol_name}` in {module.__name__}.")
+
+ _, offset = inspect.getsourcelines(symbol[-2])
+ if offset != 0:
+ offset -= 1
+ start += offset
+ end += offset
+
+ file = Path(inspect.getfile(module)).relative_to(PROJECT_ROOT).as_posix()
+
+ url = f"{source_url}/{file}#L{start}"
+ if end != start:
+ url += f"-L{end}"
+
+ return url
+
+
+def cleanup() -> None:
+ """Remove unneeded autogenerated doc files, and clean up others."""
+ included = __get_included()
+
+ for file in (PROJECT_ROOT / "docs" / "output").iterdir():
+ if file.name in ("botcore.rst", "botcore.exts.rst", "botcore.utils.rst") and file.name in included:
+ content = file.read_text(encoding="utf-8").splitlines(keepends=True)
+
+ # Rename the extension to be less wordy
+ # Example: botcore.exts -> Botcore Exts
+ title = content[0].split()[0].strip().replace("botcore.", "").replace(".", " ").title()
+ title = f"{title}\n{'=' * len(title)}\n\n"
+ content = title, *content[3:]
+
+ file.write_text("".join(content), encoding="utf-8")
+
+ elif file.name in included:
+ # Clean up the submodule name so it's just the name without the top level module name
+ # example: `botcore.regex module` -> `regex`
+ lines = file.read_text(encoding="utf-8").splitlines(keepends=True)
+ lines[0] = lines[0].replace("module", "").strip().split(".")[-1] + "\n"
+ file.write_text("".join(lines))
+
+ else:
+ # These are files that have not been explicitly included in the docs via __all__
+ print("Deleted file", file.name)
+ file.unlink()
+ continue
+
+ # Take the opportunity to configure autodoc
+ content = file.read_text(encoding="utf-8").replace("undoc-members", "special-members")
+ file.write_text(content, encoding="utf-8")
+
+
+def __get_included() -> set[str]:
+ """Get a list of files that should be included in the final build."""
+
+ def get_all_from_module(module_name: str) -> set[str]:
+ module = importlib.import_module(module_name)
+ _modules = {module.__name__ + ".rst"}
+
+ if hasattr(module, "__all__"):
+ for sub_module in module.__all__:
+ _modules.update(get_all_from_module(sub_module))
+
+ return _modules
+
+ return get_all_from_module("botcore")
diff --git a/poetry.lock b/poetry.lock
index 9848e951..2a4b7e31 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -672,18 +672,6 @@ optional = false
python-versions = ">=3.6"
[[package]]
-name = "releases"
-version = "1.6.3"
-description = "A Sphinx extension for changelog manipulation"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-semantic-version = "<2.7"
-sphinx = ">=1.3"
-
-[[package]]
name = "requests"
version = "2.27.1"
description = "Python HTTP for Humans."
@@ -702,14 +690,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
[[package]]
-name = "semantic-version"
-version = "2.6.0"
-description = "A library implementing the 'SemVer' scheme."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
@@ -955,7 +935,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "3.9.*"
-content-hash = "fd428068f017418a45a174c2a60af575aad76dfa2707b72010e891753e89cf91"
+content-hash = "38723e13b555129a52187dfaecf5cec88572d4a0eed1f5ff7350baccc39ecf70"
[metadata.files]
aiohttp = [
@@ -1424,18 +1404,10 @@ pyyaml = [
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
-releases = [
- {file = "releases-1.6.3-py2.py3-none-any.whl", hash = "sha256:cb3435ba372a6807433800fbe473760cfa781171513f670f3c4b76983ac80f18"},
- {file = "releases-1.6.3.tar.gz", hash = "sha256:555ae4c97a671a420281c1c782e9236be25157b449fdf20b4c4b293fe93db2f1"},
-]
requests = [
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
{file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
]
-semantic-version = [
- {file = "semantic_version-2.6.0-py3-none-any.whl", hash = "sha256:2d06ab7372034bcb8b54f2205370f4aa0643c133b7e6dbd129c5200b83ab394b"},
- {file = "semantic_version-2.6.0.tar.gz", hash = "sha256:2a4328680073e9b243667b201119772aefc5fc63ae32398d6afafff07c4f54c0"},
-]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
diff --git a/pyproject.toml b/pyproject.toml
index 99549f3a..e7f98819 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,14 +38,13 @@ Sphinx = "4.4.0"
tomli = "2.0.0"
GitPython = "3.1.26"
sphinx-autodoc-typehints = "1.17.0"
-releases = "1.6.3"
furo = "2022.1.2"
[tool.taskipy.tasks]
lint = "pre-commit run --all-files"
precommit = "pre-commit install"
-apidoc = "sphinx-apidoc -o docs/output botcore -fe"
+apidoc = "sphinx-apidoc -o docs/output botcore -feM"
builddoc = "sphinx-build -nW -j auto -b html docs docs/build"
docs = "task apidoc && task builddoc"
diff --git a/tox.ini b/tox.ini
index 9472c32f..e0145e7a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,7 +2,7 @@
max-line-length=120
docstring-convention=all
import-order-style=pycharm
-application_import_names=bot,tests
+application_import_names=botcore,docs,tests
exclude=.cache,.venv,.git,constants.py
ignore=
B311,W503,E226,S311,T000,E731