From b5af23252fe9186a6b1412cf67a935380f616555 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 17 Jun 2020 19:42:25 +0200 Subject: Resolve relative href urls in a html elements. Most docs will use relative urls to link across their pages, without resolving them ourselves the links remain unusable in discord's markdown and break out of codeblocks on mobile. --- bot/cogs/doc.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 204cffb37..51fb2cb82 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -7,6 +7,7 @@ from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace from typing import Any, Callable, Optional, Tuple +from urllib.parse import urljoin import discord from bs4 import BeautifulSoup @@ -98,6 +99,10 @@ def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: class DocMarkdownConverter(MarkdownConverter): """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" + def __init__(self, *, page_url: str, **options): + super().__init__(**options) + self.page_url = page_url + def convert_code(self, el: PageElement, text: str) -> str: """Undo `markdownify`s underscore escaping.""" return f"`{text}`".replace('\\', '') @@ -107,10 +112,15 @@ class DocMarkdownConverter(MarkdownConverter): code = ''.join(el.strings) return f"```py\n{code}```" + def convert_a(self, el: PageElement, text: str) -> str: + """Resolve relative URLs to `self.page_url`.""" + el["href"] = urljoin(self.page_url, el["href"]) + return super().convert_a(el, text) + -def markdownify(html: str) -> DocMarkdownConverter: +def markdownify(html: str, *, url: str = "") -> DocMarkdownConverter: """Create a DocMarkdownConverter object from the input html.""" - return DocMarkdownConverter(bullets='•').convert(html) + return DocMarkdownConverter(bullets='•', page_url=url).convert(html) class 
InventoryURL(commands.Converter): @@ -293,7 +303,7 @@ class Doc(commands.Cog): signatures = scraped_html[0] permalink = self.inventories[symbol] - description = markdownify(scraped_html[1]) + description = markdownify(scraped_html[1], url=permalink) # Truncate the description of the embed to the last occurrence # of a double newline (interpreted as a paragraph) before index 1000. -- cgit v1.2.3 From 5dfbec9d589f62bb1270b162d734749d5b7b069d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 17 Jun 2020 21:41:04 +0200 Subject: Make doc get greedy. This allows us to find docs for symbols with spaces in them. --- bot/cogs/doc.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 51fb2cb82..010cb9f4c 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -353,12 +353,12 @@ class Doc(commands.Cog): return embed @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) - async def docs_group(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: + async def docs_group(self, ctx: commands.Context, *, symbol: str) -> None: """Lookup documentation for Python symbols.""" - await ctx.invoke(self.get_command, symbol) + await ctx.invoke(self.get_command, symbol=symbol) @docs_group.command(name='get', aliases=('g',)) - async def get_command(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: + async def get_command(self, ctx: commands.Context, *, symbol: str) -> None: """ Return a documentation embed for a given symbol. 
@@ -370,7 +370,7 @@ class Doc(commands.Cog): !docs aiohttp.ClientSession !docs get aiohttp.ClientSession """ - if symbol is None: + if not symbol: inventory_embed = discord.Embed( title=f"All inventories (`{len(self.base_urls)}` total)", colour=discord.Colour.blue() @@ -392,8 +392,9 @@ class Doc(commands.Cog): doc_embed = await self.get_symbol_embed(symbol) if doc_embed is None: + symbol = await discord.ext.commands.clean_content().convert(ctx, symbol) error_embed = discord.Embed( - description=f"Sorry, I could not find any documentation for `{symbol}`.", + description=f"Sorry, I could not find any documentation for `{(symbol)}`.", colour=discord.Colour.red() ) error_message = await ctx.send(embed=error_embed) -- cgit v1.2.3 From 39aa2fbe0d19edcb61080e49d591a370820bce47 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 17 Jun 2020 21:48:55 +0200 Subject: Skip symbols with slashes in them. The symbols mostly point to autogenerated pages, and do not link to specific symbols on their pages and are thus unreachable with the current implementation. --- bot/cogs/doc.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 010cb9f4c..59c3cc729 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -191,6 +191,8 @@ class Doc(commands.Cog): for group, value in package.items(): for symbol, (package_name, _version, relative_doc_url, _) in value.items(): + if "/" in symbol: + continue # skip unreachable symbols with slashes absolute_doc_url = base_url + relative_doc_url if symbol in self.inventories: -- cgit v1.2.3 From 41e906d6b978f0745f0aff5e7065ce142282a44f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 18 Jun 2020 00:20:25 +0200 Subject: Move symbol parsing into separate methods. 
--- bot/cogs/doc.py | 66 +++++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 43 insertions(+), 23 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 59c3cc729..a1364dd8b 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -6,7 +6,7 @@ import textwrap from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Any, Callable, Optional, Tuple +from typing import Any, Callable, List, Optional, Tuple from urllib.parse import urljoin import discord @@ -265,30 +265,14 @@ class Doc(commands.Cog): return None if symbol_id == f"module-{symbol}": - # Get page content from the module headerlink to the - # first tag that has its class in `SEARCH_END_TAG_ATTRS` - start_tag = symbol_heading.find("a", attrs={"class": "headerlink"}) - if start_tag is None: - return [], "" - - end_tag = start_tag.find_next(self._match_end_tag) - if end_tag is None: - return [], "" - - description_start_index = search_html.find(str(start_tag.parent)) + len(str(start_tag.parent)) - description_end_index = search_html.find(str(end_tag)) - description = search_html[description_start_index:description_end_index] - signatures = None + parsed_module = self.parse_module_symbol(symbol_heading, search_html) + if parsed_module is None: + return None + else: + signatures, description = parsed_module else: - signatures = [] - description = str(symbol_heading.find_next_sibling("dd")) - description_pos = search_html.find(description) - # Get text of up to 3 signatures, remove unwanted symbols - for element in [symbol_heading] + symbol_heading.find_next_siblings("dt", limit=2): - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) - if signature and search_html.find(str(element)) < description_pos: - signatures.append(signature) + signatures, description = self.parse_symbol(symbol_heading, search_html) return signatures, description.replace('¶', '') @@ -354,6 +338,42 @@ class Doc(commands.Cog): ) return 
embed + @classmethod + def parse_module_symbol(cls, heading: PageElement, html: str) -> Optional[Tuple[None, str]]: + """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" + start_tag = heading.find("a", attrs={"class": "headerlink"}) + if start_tag is None: + return None + + end_tag = start_tag.find_next(cls._match_end_tag) + if end_tag is None: + return None + + description_start_index = html.find(str(start_tag.parent)) + len(str(start_tag.parent)) + description_end_index = html.find(str(end_tag)) + description = html[description_start_index:description_end_index] + + return None, description + + @staticmethod + def parse_symbol(heading: PageElement, html: str) -> Tuple[List[str], str]: + """ + Parse the signatures and description of a symbol. + + Collects up to 3 signatures from dt tags and a description from their sibling dd tag. + """ + signatures = [] + description = str(heading.find_next_sibling("dd")) + description_pos = html.find(description) + + for element in [heading] + heading.find_next_siblings("dt", limit=2): + signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) + + if signature and html.find(str(element)) < description_pos: + signatures.append(signature) + + return signatures, description + @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol: str) -> None: """Lookup documentation for Python symbols.""" -- cgit v1.2.3 From b0f46ace7b2d4997d5002eb75199490f7828d829 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 18 Jun 2020 03:58:27 +0200 Subject: Make sure only class contents are included, without methods. When parsing classes, methods would sometimes get included causing bad looking markdown to be included in the description, this is solved by collecting all text *up to* the next dt tag. 
fixes: #990 --- bot/cogs/doc.py | 55 ++++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 42 insertions(+), 13 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index a1364dd8b..51323e64f 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -6,7 +6,7 @@ import textwrap from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Any, Callable, List, Optional, Tuple +from typing import Any, Callable, List, Optional, Tuple, Union from urllib.parse import urljoin import discord @@ -265,7 +265,7 @@ class Doc(commands.Cog): return None if symbol_id == f"module-{symbol}": - parsed_module = self.parse_module_symbol(symbol_heading, search_html) + parsed_module = self.parse_module_symbol(symbol_heading) if parsed_module is None: return None else: @@ -339,32 +339,29 @@ class Doc(commands.Cog): return embed @classmethod - def parse_module_symbol(cls, heading: PageElement, html: str) -> Optional[Tuple[None, str]]: + def parse_module_symbol(cls, heading: PageElement) -> Optional[Tuple[None, str]]: """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" start_tag = heading.find("a", attrs={"class": "headerlink"}) if start_tag is None: return None - end_tag = start_tag.find_next(cls._match_end_tag) - if end_tag is None: + description = cls.find_all_text_until_tag(start_tag, cls._match_end_tag) + if description is None: return None - description_start_index = html.find(str(start_tag.parent)) + len(str(start_tag.parent)) - description_end_index = html.find(str(end_tag)) - description = html[description_start_index:description_end_index] - return None, description - @staticmethod - def parse_symbol(heading: PageElement, html: str) -> Tuple[List[str], str]: + @classmethod + def parse_symbol(cls, heading: PageElement, html: str) -> Tuple[List[str], str]: """ Parse the signatures and description of a symbol. 
Collects up to 3 signatures from dt tags and a description from their sibling dd tag. """ signatures = [] - description = str(heading.find_next_sibling("dd")) - description_pos = html.find(description) + description_element = heading.find_next_sibling("dd") + description_pos = html.find(str(description_element)) + description = "".join(cls.find_all_text_until_tag(description_element, ("dt",))) for element in [heading] + heading.find_next_siblings("dt", limit=2): signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) @@ -374,6 +371,38 @@ class Doc(commands.Cog): return signatures, description + @staticmethod + def find_all_text_until_tag( + start_element: PageElement, + tag_filter: Union[Tuple[str], Callable[[Tag], bool]] + ) -> Optional[str]: + """ + Get all text from

elements until a tag matching `tag_filter` is found, max 1000 elements searched. + + `tag_filter` can be either a tuple of string names to check against, + or a filtering callable that's applied to the tags. + If no matching end tag is found, None is returned. + """ + text = "" + element = start_element + for _ in range(1000): + if element is None: + break + + element = element.find_next() + if element.name == "p": + text += str(element) + + elif isinstance(tag_filter, tuple): + if element.name in tag_filter: + break + else: + if tag_filter(element): + break + else: + return None + return text + @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol: str) -> None: """Lookup documentation for Python symbols.""" -- cgit v1.2.3 From 8756c741035d007a5d3f3309b877f56b9ccd0ef1 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 00:59:32 +0200 Subject: Account for `NavigableString`s when gathering text. `find_next()` only goes to tags, leaving out text outside of them when parsing. --- bot/cogs/doc.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 51323e64f..d64e6692f 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -11,7 +11,7 @@ from urllib.parse import urljoin import discord from bs4 import BeautifulSoup -from bs4.element import PageElement, Tag +from bs4.element import NavigableString, PageElement, Tag from discord.errors import NotFound from discord.ext import commands from markdownify import MarkdownConverter @@ -377,7 +377,9 @@ class Doc(commands.Cog): tag_filter: Union[Tuple[str], Callable[[Tag], bool]] ) -> Optional[str]: """ - Get all text from

elements until a tag matching `tag_filter` is found, max 1000 elements searched. + Get all text from

elements and strings until a tag matching `tag_filter` is found. + + Max 1000 elements are searched to avoid going through whole pages when no matching tag is found. `tag_filter` can be either a tuple of string names to check against, or a filtering callable that's applied to the tags. @@ -389,7 +391,11 @@ class Doc(commands.Cog): if element is None: break - element = element.find_next() + element = element.next + while isinstance(element, NavigableString): + text += element + element = element.next + if element.name == "p": text += str(element) -- cgit v1.2.3 From e11c5a35f8f494f13323d53c0c514524902b2ae7 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 01:45:54 +0200 Subject: Also check signatures before selected symbol when collecting 3 signatures. --- bot/cogs/doc.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index d64e6692f..b0adc52ba 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -363,7 +363,11 @@ class Doc(commands.Cog): description_pos = html.find(str(description_element)) description = "".join(cls.find_all_text_until_tag(description_element, ("dt",))) - for element in [heading] + heading.find_next_siblings("dt", limit=2): + for element in ( + *reversed(heading.find_previous_siblings("dt", limit=2)), + heading, + *heading.find_next_siblings("dt", limit=2), + )[-3:]: signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) if signature and html.find(str(element)) < description_pos: -- cgit v1.2.3 From bdccd72747829560eddecc2ae247e5da3a936237 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 01:46:46 +0200 Subject: Remove unnecessary join. `find_all_text_until_tag` already returns a string so a join is not needed. 
--- bot/cogs/doc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index b0adc52ba..35139a050 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -361,7 +361,7 @@ class Doc(commands.Cog): signatures = [] description_element = heading.find_next_sibling("dd") description_pos = html.find(str(description_element)) - description = "".join(cls.find_all_text_until_tag(description_element, ("dt",))) + description = cls.find_all_text_until_tag(description_element, ("dt",)) for element in ( *reversed(heading.find_previous_siblings("dt", limit=2)), -- cgit v1.2.3 From d1900d537086b5d195da320cdc949e64afb99cd0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 01:52:02 +0200 Subject: Add symbol group name to symbol inventory entries. --- bot/cogs/doc.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 35139a050..741fd0ddd 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -6,7 +6,7 @@ import textwrap from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, List, NamedTuple, Optional, Tuple, Union from urllib.parse import urljoin import discord @@ -67,6 +67,13 @@ FAILED_REQUEST_RETRY_AMOUNT = 3 NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay +class DocItem(NamedTuple): + """Holds inventory symbol information.""" + + url: str + group: str + + def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: """ LRU cache implementation for coroutines. 
@@ -194,10 +201,10 @@ class Doc(commands.Cog): if "/" in symbol: continue # skip unreachable symbols with slashes absolute_doc_url = base_url + relative_doc_url + group_name = group.split(":")[1] if symbol in self.inventories: - group_name = group.split(":")[1] - symbol_base_url = self.inventories[symbol].split("/", 3)[2] + symbol_base_url = self.inventories[symbol].url.split("/", 3)[2] if ( group_name in NO_OVERRIDE_GROUPS or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) @@ -209,11 +216,11 @@ class Doc(commands.Cog): # Split `package_name` because of packages like Pillow that have spaces in them. symbol = f"{package_name.split()[0]}.{symbol}" - self.inventories[symbol] = absolute_doc_url + self.inventories[symbol] = DocItem(absolute_doc_url, group_name) self.renamed_symbols.add(symbol) continue - self.inventories[symbol] = absolute_doc_url + self.inventories[symbol] = DocItem(absolute_doc_url, group_name) log.trace(f"Fetched inventory for {package_name}.") @@ -248,15 +255,15 @@ class Doc(commands.Cog): If the given symbol is a module, returns a tuple `(None, str)` else if the symbol could not be found, returns `None`. """ - url = self.inventories.get(symbol) - if url is None: + symbol_info = self.inventories.get(symbol) + if symbol_info is None: return None - async with self.bot.http_session.get(url) as response: + async with self.bot.http_session.get(symbol_info.url) as response: html = await response.text(encoding='utf-8') # Find the signature header and parse the relevant parts. 
- symbol_id = url.split('#')[-1] + symbol_id = symbol_info.url.split('#')[-1] soup = BeautifulSoup(html, 'lxml') symbol_heading = soup.find(id=symbol_id) search_html = str(soup) @@ -288,7 +295,7 @@ class Doc(commands.Cog): return None signatures = scraped_html[0] - permalink = self.inventories[symbol] + permalink = self.inventories[symbol].url description = markdownify(scraped_html[1], url=permalink) # Truncate the description of the embed to the last occurrence -- cgit v1.2.3 From d790c404ca3dba3843f351d6f42e766956aa73a1 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 02:37:32 +0200 Subject: Renamed existing symbols from `NO_OVERRIDE_GROUPS` instead of replacing. Before, when a symbol from the group shared the name with a symbol outside of it the symbol was simply replaced and lost. The new implementation renames the old symbols to the group_name.symbol format before the new symbol takes their place. --- bot/cogs/doc.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 741fd0ddd..4eea06386 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -209,16 +209,21 @@ class Doc(commands.Cog): group_name in NO_OVERRIDE_GROUPS or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) ): - symbol = f"{group_name}.{symbol}" - # If renamed `symbol` already exists, add library name in front to differentiate between them. - if symbol in self.renamed_symbols: - # Split `package_name` because of packages like Pillow that have spaces in them. 
- symbol = f"{package_name.split()[0]}.{symbol}" - self.inventories[symbol] = DocItem(absolute_doc_url, group_name) + elif (overridden_symbol_group := self.inventories[symbol].group) in NO_OVERRIDE_GROUPS: + overridden_symbol = f"{overridden_symbol_group}.{symbol}" + if overridden_symbol in self.renamed_symbols: + overridden_symbol = f"{package_name.split()[0]}.{overridden_symbol}" + + self.inventories[overridden_symbol] = self.inventories[symbol] + self.renamed_symbols.add(overridden_symbol) + + # If renamed `symbol` already exists, add library name in front to differentiate between them. + if symbol in self.renamed_symbols: + # Split `package_name` because of packages like Pillow that have spaces in them. + symbol = f"{package_name.split()[0]}.{symbol}" self.renamed_symbols.add(symbol) - continue self.inventories[symbol] = DocItem(absolute_doc_url, group_name) -- cgit v1.2.3 From bca55c25ffb3631ba05889a88908a02ccb2beb2a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 02:42:26 +0200 Subject: Fix typehint. --- bot/cogs/doc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 4eea06386..a01f6d64d 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -125,7 +125,7 @@ class DocMarkdownConverter(MarkdownConverter): return super().convert_a(el, text) -def markdownify(html: str, *, url: str = "") -> DocMarkdownConverter: +def markdownify(html: str, *, url: str = "") -> str: """Create a DocMarkdownConverter object from the input html.""" return DocMarkdownConverter(bullets='•', page_url=url).convert(html) -- cgit v1.2.3 From 38991027a38b1adc4be3c99d126dae76a3a62036 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 03:09:23 +0200 Subject: Correct return when a module symbol could not be parsed. 
--- bot/cogs/doc.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index a01f6d64d..1c9d80e47 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -279,7 +279,7 @@ class Doc(commands.Cog): if symbol_id == f"module-{symbol}": parsed_module = self.parse_module_symbol(symbol_heading) if parsed_module is None: - return None + return [], "" else: signatures, description = parsed_module @@ -538,14 +538,13 @@ class Doc(commands.Cog): old_inventories = set(self.base_urls) with ctx.typing(): await self.refresh_inventory() - # Get differences of added and removed inventories - added = ', '.join(inv for inv in self.base_urls if inv not in old_inventories) - if added: - added = f"+ {added}" - - removed = ', '.join(inv for inv in old_inventories if inv not in self.base_urls) - if removed: - removed = f"- {removed}" + new_inventories = set(self.base_urls) + + if added := ", ".join(new_inventories - old_inventories): + added = "+ " + added + + if removed := ", ".join(old_inventories - new_inventories): + removed = "- " + removed embed = discord.Embed( title="Inventories refreshed", -- cgit v1.2.3 From a28ae5dfb610151060eab9856c44b2d192131f0d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Jun 2020 15:58:55 +0200 Subject: Strip backticks from symbol input. This allows the user to wrap symbols in codeblocks to avoid markdown. --- bot/cogs/doc.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 1c9d80e47..0dc1713a3 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -458,6 +458,7 @@ class Doc(commands.Cog): await ctx.send(embed=inventory_embed) else: + symbol = symbol.strip("`") # Fetching documentation for a symbol (at least for the first time, since # caching is used) takes quite some time, so let's send typing to indicate # that we got the command, but are still working on it. 
-- cgit v1.2.3 From c461bef250cd3d44fac2c0e64da21072f963909d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 27 Jun 2020 15:46:47 +0200 Subject: Redesign `find_all_text_until_tag` to search through all direct children. The previous approach didn't work for arbitrary tags with text. --- bot/cogs/doc.py | 39 ++++++++++++--------------------------- 1 file changed, 12 insertions(+), 27 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 0dc1713a3..e4b54f0a5 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -11,7 +11,7 @@ from urllib.parse import urljoin import discord from bs4 import BeautifulSoup -from bs4.element import NavigableString, PageElement, Tag +from bs4.element import PageElement, Tag from discord.errors import NotFound from discord.ext import commands from markdownify import MarkdownConverter @@ -357,7 +357,7 @@ class Doc(commands.Cog): if start_tag is None: return None - description = cls.find_all_text_until_tag(start_tag, cls._match_end_tag) + description = cls.find_all_children_until_tag(start_tag, cls._match_end_tag) if description is None: return None @@ -373,7 +373,7 @@ class Doc(commands.Cog): signatures = [] description_element = heading.find_next_sibling("dd") description_pos = html.find(str(description_element)) - description = cls.find_all_text_until_tag(description_element, ("dt",)) + description = cls.find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) for element in ( *reversed(heading.find_previous_siblings("dt", limit=2)), @@ -388,41 +388,26 @@ class Doc(commands.Cog): return signatures, description @staticmethod - def find_all_text_until_tag( + def find_all_children_until_tag( start_element: PageElement, - tag_filter: Union[Tuple[str], Callable[[Tag], bool]] + tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] ) -> Optional[str]: """ - Get all text from

elements and strings until a tag matching `tag_filter` is found. - - Max 1000 elements are searched to avoid going through whole pages when no matching tag is found. + Get all direct children until a child matching `tag_filter` is found. `tag_filter` can be either a tuple of string names to check against, or a filtering callable that's applied to the tags. - If no matching end tag is found, None is returned. """ text = "" - element = start_element - for _ in range(1000): - if element is None: - break - - element = element.next - while isinstance(element, NavigableString): - text += element - element = element.next - if element.name == "p": - text += str(element) - - elif isinstance(tag_filter, tuple): + for element in start_element.find_next().find_next_siblings(): + if isinstance(tag_filter, tuple): if element.name in tag_filter: break - else: - if tag_filter(element): - break - else: - return None + elif tag_filter(element): + break + text += str(element) + return text @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) -- cgit v1.2.3 From ff3afe58548a8f1ed675c1933545e481e99bfc78 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 27 Jun 2020 15:48:28 +0200 Subject: Only include one newline for `p` tags in `li` elements. 
--- bot/cogs/doc.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index e4b54f0a5..c1e8cebcf 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -124,6 +124,13 @@ class DocMarkdownConverter(MarkdownConverter): el["href"] = urljoin(self.page_url, el["href"]) return super().convert_a(el, text) + def convert_p(self, el: PageElement, text: str) -> str: + """Include only one newline instead of two when the parent is a li tag.""" + parent = el.parent + if parent is not None and parent.name == "li": + return f"{text}\n" + return super().convert_p(el, text) + def markdownify(html: str, *, url: str = "") -> str: """Create a DocMarkdownConverter object from the input html.""" -- cgit v1.2.3 From 6532618a503a55653499089a2d6a4ca43be7e2bf Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 28 Jun 2020 01:45:17 +0200 Subject: Only update added inventory instead of all. --- bot/cogs/doc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index c1e8cebcf..7c4beb075 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -504,7 +504,7 @@ class Doc(commands.Cog): # Rebuilding the inventory can take some time, so lets send out a # typing event to show that the Bot is still working. async with ctx.typing(): - await self.refresh_inventory() + await self.update_single(package_name, base_url, inventory_url) await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") @docs_group.command(name='delete', aliases=('remove', 'rm', 'd')) -- cgit v1.2.3 From fd839ef3f193586c204f52ca76a84c18a8f3ba1e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 29 Jun 2020 02:39:00 +0200 Subject: Add stat for packages of fetched symbols. An additional variable is added to the DocItem named tuple to accommodate this. 
The `_package_name` is separated from `api_package_name` it previously overwrote and is now used for the stats and renamed symbols because the names are in a friendlier format. --- bot/cogs/doc.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 7c4beb075..e1c25d173 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -6,7 +6,7 @@ import textwrap from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Any, Callable, List, NamedTuple, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union from urllib.parse import urljoin import discord @@ -70,6 +70,7 @@ NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay class DocItem(NamedTuple): """Holds inventory symbol information.""" + package: str url: str group: str @@ -174,7 +175,7 @@ class Doc(commands.Cog): def __init__(self, bot: Bot): self.base_urls = {} self.bot = bot - self.inventories = {} + self.inventories: Dict[str, DocItem] = {} self.renamed_symbols = set() self.bot.loop.create_task(self.init_refresh_inventory()) @@ -185,7 +186,7 @@ class Doc(commands.Cog): await self.refresh_inventory() async def update_single( - self, package_name: str, base_url: str, inventory_url: str + self, api_package_name: str, base_url: str, inventory_url: str ) -> None: """ Rebuild the inventory for a single package. 
@@ -197,14 +198,14 @@ class Doc(commands.Cog): * `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running `intersphinx.fetch_inventory` in an executor on the bot's event loop """ - self.base_urls[package_name] = base_url + self.base_urls[api_package_name] = base_url package = await self._fetch_inventory(inventory_url) if not package: return None for group, value in package.items(): - for symbol, (package_name, _version, relative_doc_url, _) in value.items(): + for symbol, (_package_name, _version, relative_doc_url, _) in value.items(): if "/" in symbol: continue # skip unreachable symbols with slashes absolute_doc_url = base_url + relative_doc_url @@ -221,7 +222,7 @@ class Doc(commands.Cog): elif (overridden_symbol_group := self.inventories[symbol].group) in NO_OVERRIDE_GROUPS: overridden_symbol = f"{overridden_symbol_group}.{symbol}" if overridden_symbol in self.renamed_symbols: - overridden_symbol = f"{package_name.split()[0]}.{overridden_symbol}" + overridden_symbol = f"{api_package_name}.{overridden_symbol}" self.inventories[overridden_symbol] = self.inventories[symbol] self.renamed_symbols.add(overridden_symbol) @@ -229,12 +230,12 @@ class Doc(commands.Cog): # If renamed `symbol` already exists, add library name in front to differentiate between them. if symbol in self.renamed_symbols: # Split `package_name` because of packages like Pillow that have spaces in them. 
- symbol = f"{package_name.split()[0]}.{symbol}" + symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) - self.inventories[symbol] = DocItem(absolute_doc_url, group_name) + self.inventories[symbol] = DocItem(api_package_name, absolute_doc_url, group_name) - log.trace(f"Fetched inventory for {package_name}.") + log.trace(f"Fetched inventory for {api_package_name}.") async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" @@ -306,8 +307,10 @@ class Doc(commands.Cog): if scraped_html is None: return None + symbol_obj = self.inventories[symbol] + self.bot.stats.incr(f"doc_fetches.{symbol_obj.package.lower()}") signatures = scraped_html[0] - permalink = self.inventories[symbol].url + permalink = symbol_obj.url description = markdownify(scraped_html[1], url=permalink) # Truncate the description of the embed to the last occurrence -- cgit v1.2.3 From b6dc7536fd90e27f5dfdf3204dc2f17917d78ee2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 29 Jun 2020 02:42:27 +0200 Subject: Trigger typing in converter instead of command. The converter does a web request so triggering typing in the command itself left out a period where the bot seemed inactive. --- bot/cogs/doc.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index e1c25d173..50aa9bbad 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -151,6 +151,7 @@ class InventoryURL(commands.Converter): @staticmethod async def convert(ctx: commands.Context, url: str) -> str: """Convert url to Intersphinx inventory URL.""" + await ctx.trigger_typing() try: intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url) except AttributeError: @@ -504,10 +505,7 @@ class Doc(commands.Cog): f"Inventory URL: {inventory_url}" ) - # Rebuilding the inventory can take some time, so lets send out a - # typing event to show that the Bot is still working. 
- async with ctx.typing(): - await self.update_single(package_name, base_url, inventory_url) + await self.update_single(package_name, base_url, inventory_url) await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") @docs_group.command(name='delete', aliases=('remove', 'rm', 'd')) -- cgit v1.2.3 From 782cd1771ce9254761a70bbfbfa8e883c1330c6c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 29 Jun 2020 16:27:24 +0200 Subject: Add option for user to delete the not found message before it's auto deleted. --- bot/cogs/doc.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 50aa9bbad..b288a92b1 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -12,7 +12,6 @@ from urllib.parse import urljoin import discord from bs4 import BeautifulSoup from bs4.element import PageElement, Tag -from discord.errors import NotFound from discord.ext import commands from markdownify import MarkdownConverter from requests import ConnectTimeout, ConnectionError, HTTPError @@ -24,6 +23,7 @@ from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import ValidPythonIdentifier, ValidURL from bot.decorators import with_role from bot.pagination import LinePaginator +from bot.utils.messages import wait_for_deletion log = logging.getLogger(__name__) @@ -468,9 +468,16 @@ class Doc(commands.Cog): colour=discord.Colour.red() ) error_message = await ctx.send(embed=error_embed) - with suppress(NotFound): - await error_message.delete(delay=NOT_FOUND_DELETE_DELAY) - await ctx.message.delete(delay=NOT_FOUND_DELETE_DELAY) + await wait_for_deletion( + error_message, + (ctx.author.id,), + timeout=NOT_FOUND_DELETE_DELAY, + client=self.bot + ) + with suppress(discord.NotFound): + await ctx.message.delete() + with suppress(discord.NotFound): + await error_message.delete() else: await ctx.send(embed=doc_embed) -- cgit v1.2.3 From 
09820f5b4a55d6240a05f848ea446bd46062f444 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Sun, 5 Jul 2020 20:33:03 +0200 Subject: Added better support for GitHub/GitLab --- bot/__main__.py | 2 + bot/cogs/print_snippets.py | 200 +++++++++++++++++++++++++++++++++++++++++++++ bot/cogs/repo_widgets.py | 123 ++++++++++++++++++++++++++++ 3 files changed, 325 insertions(+) create mode 100644 bot/cogs/print_snippets.py create mode 100644 bot/cogs/repo_widgets.py diff --git a/bot/__main__.py b/bot/__main__.py index 4e0d4a111..1d415eb20 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -71,6 +71,8 @@ bot.load_extension("bot.cogs.utils") bot.load_extension("bot.cogs.watchchannels") bot.load_extension("bot.cogs.webhook_remover") bot.load_extension("bot.cogs.wolfram") +bot.load_extension("bot.cogs.print_snippets") +bot.load_extension("bot.cogs.repo_widgets") if constants.HelpChannels.enable: bot.load_extension("bot.cogs.help_channels") diff --git a/bot/cogs/print_snippets.py b/bot/cogs/print_snippets.py new file mode 100644 index 000000000..06c9d6cc1 --- /dev/null +++ b/bot/cogs/print_snippets.py @@ -0,0 +1,200 @@ +""" +Cog that prints out snippets to Discord + +Matches each message against a regex and prints the contents +of the first matched snippet url +""" + +import os +import re +import textwrap + +from discord import Message +from discord.ext.commands import Cog +import aiohttp + +from bot.bot import Bot + + +async def fetch_http(session: aiohttp.ClientSession, url: str, response_format='text', **kwargs) -> str: + """Uses aiohttp to make http GET requests""" + + async with session.get(url, **kwargs) as response: + if response_format == 'text': + return await response.text() + elif response_format == 'json': + return await response.json() + + +async def revert_to_orig(d: dict) -> dict: + """Replace URL Encoded values back to their original""" + + for obj in d: + if d[obj] is not None: + d[obj] = d[obj].replace('%2F', '/').replace('%2E', '.') + + +async def 
orig_to_encode(d: dict) -> dict: + """Encode URL Parameters""" + + for obj in d: + if d[obj] is not None: + d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') + + +async def snippet_to_embed(d: dict, file_contents: str) -> str: + """ + Given a regex groupdict and file contents, creates a code block + """ + + if d['end_line']: + start_line = int(d['start_line']) + end_line = int(d['end_line']) + else: + start_line = end_line = int(d['start_line']) + + split_file_contents = file_contents.split('\n') + + if start_line > end_line: + start_line, end_line = end_line, start_line + if start_line > len(split_file_contents) or end_line < 1: + return '' + start_line = max(1, start_line) + end_line = min(len(split_file_contents), end_line) + + required = '\n'.join(split_file_contents[start_line - 1:end_line]) + required = textwrap.dedent(required).rstrip().replace('`', '`\u200b') + + language = d['file_path'].split('/')[-1].split('.')[-1] + if not language.replace('-', '').replace('+', '').replace('_', '').isalnum(): + language = '' + + if len(required) != 0: + return f'```{language}\n{required}```\n' + return '``` ```\n' + + +GITHUB_RE = re.compile( + r'https://github\.com/(?P.+?)/blob/(?P.+?)/' + + r'(?P.+?)#L(?P\d+)([-~]L(?P\d+))?\b' +) + +GITHUB_GIST_RE = re.compile( + r'https://gist\.github\.com/([^/]*)/(?P[0-9a-zA-Z]+)/*' + + r'(?P[0-9a-zA-Z]*)/*#file-(?P.+?)' + + r'-L(?P\d+)([-~]L(?P\d+))?\b' +) + +GITLAB_RE = re.compile( + r'https://gitlab\.com/(?P.+?)/\-/blob/(?P.+?)/' + + r'(?P.+?)#L(?P\d+)([-~](?P\d+))?\b' +) + +BITBUCKET_RE = re.compile( + r'https://bitbucket\.org/(?P.+?)/src/(?P.+?)/' + + r'(?P.+?)#lines-(?P\d+)(:(?P\d+))?\b' +) + + +class PrintSnippets(Cog): + def __init__(self, bot): + """Initializes the cog's bot""" + + self.bot = bot + self.session = aiohttp.ClientSession() + + @Cog.listener() + async def on_message(self, message: Message) -> None: + """ + Checks if the message starts is a GitHub snippet, then removes the embed, + then sends the snippet 
in Discord + """ + + gh_match = GITHUB_RE.search(message.content) + gh_gist_match = GITHUB_GIST_RE.search(message.content) + gl_match = GITLAB_RE.search(message.content) + bb_match = BITBUCKET_RE.search(message.content) + + if (gh_match or gh_gist_match or gl_match or bb_match) and not message.author.bot: + message_to_send = '' + + for gh in GITHUB_RE.finditer(message.content): + d = gh.groupdict() + headers = {'Accept': 'application/vnd.github.v3.raw'} + if 'GITHUB_TOKEN' in os.environ: + headers['Authorization'] = f'token {os.environ["GITHUB_TOKEN"]}' + file_contents = await fetch_http( + self.session, + f'https://api.github.com/repos/{d["repo"]}/contents/{d["file_path"]}?ref={d["branch"]}', + 'text', + headers=headers, + ) + message_to_send += await snippet_to_embed(d, file_contents) + + for gh_gist in GITHUB_GIST_RE.finditer(message.content): + d = gh_gist.groupdict() + gist_json = await fetch_http( + self.session, + f'https://api.github.com/gists/{d["gist_id"]}{"/" + d["revision"] if len(d["revision"]) > 0 else ""}', + 'json', + ) + for f in gist_json['files']: + if d['file_path'] == f.lower().replace('.', '-'): + d['file_path'] = f + file_contents = await fetch_http( + self.session, + gist_json['files'][f]['raw_url'], + 'text', + ) + message_to_send += await snippet_to_embed(d, file_contents) + break + + for gl in GITLAB_RE.finditer(message.content): + d = gl.groupdict() + await orig_to_encode(d) + headers = {} + if 'GITLAB_TOKEN' in os.environ: + headers['PRIVATE-TOKEN'] = os.environ["GITLAB_TOKEN"] + file_contents = await fetch_http( + self.session, + f'https://gitlab.com/api/v4/projects/{d["repo"]}/repository/files/{d["file_path"]}/raw?ref={d["branch"]}', + 'text', + headers=headers, + ) + await revert_to_orig(d) + message_to_send += await snippet_to_embed(d, file_contents) + + for bb in BITBUCKET_RE.finditer(message.content): + d = bb.groupdict() + await orig_to_encode(d) + file_contents = await fetch_http( + self.session, + 
f'https://bitbucket.org/{d["repo"]}/raw/{d["branch"]}/{d["file_path"]}', + 'text', + ) + await revert_to_orig(d) + message_to_send += await snippet_to_embed(d, file_contents) + + message_to_send = message_to_send[:-1] + + if len(message_to_send) > 2000: + await message.channel.send( + 'Sorry, Discord has a 2000 character limit. Please send a shorter ' + + 'snippet or split the big snippet up into several smaller ones :slight_smile:' + ) + elif len(message_to_send) == 0: + await message.channel.send( + 'Please send valid snippet links to prevent spam :slight_smile:' + ) + elif message_to_send.count('\n') > 50: + await message.channel.send( + 'Please limit the total number of lines to at most 50 to prevent spam :slight_smile:' + ) + else: + await message.channel.send(message_to_send) + await message.edit(suppress=True) + + +def setup(bot: Bot) -> None: + """Load the Utils cog.""" + bot.add_cog(PrintSnippets(bot)) diff --git a/bot/cogs/repo_widgets.py b/bot/cogs/repo_widgets.py new file mode 100644 index 000000000..70ca387ec --- /dev/null +++ b/bot/cogs/repo_widgets.py @@ -0,0 +1,123 @@ +""" +Cog that sends pretty embeds of repos + +Matches each message against a regex and prints the contents +of the first matched snippet url +""" + +import os +import re + +from discord import Embed, Message +from discord.ext.commands import Cog +import aiohttp + +from bot.bot import Bot + + +async def fetch_http(session: aiohttp.ClientSession, url: str, response_format='text', **kwargs) -> str: + """Uses aiohttp to make http GET requests""" + + async with session.get(url, **kwargs) as response: + if response_format == 'text': + return await response.text() + elif response_format == 'json': + return await response.json() + + +async def orig_to_encode(d: dict) -> dict: + """Encode URL Parameters""" + + for obj in d: + if d[obj] is not None: + d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') + + +GITHUB_RE = re.compile( + r'https://github\.com/(?P[^/]+?)/(?P[^/]+?)(?:\s|$)') + 
+GITLAB_RE = re.compile( + r'https://gitlab\.com/(?P[^/]+?)/(?P[^/]+?)(?:\s|$)') + + +class RepoWidgets(Cog): + def __init__(self, bot: Bot): + """Initializes the cog's bot""" + + self.bot = bot + self.session = aiohttp.ClientSession() + + @Cog.listener() + async def on_message(self, message: Message) -> None: + """ + Checks if the message starts is a GitHub repo link, then removes the embed, + then sends a rich embed to Discord + """ + + gh_match = GITHUB_RE.search(message.content) + gl_match = GITLAB_RE.search(message.content) + + if (gh_match or gl_match) and not message.author.bot: + for gh in GITHUB_RE.finditer(message.content): + d = gh.groupdict() + headers = {} + if 'GITHUB_TOKEN' in os.environ: + headers['Authorization'] = f'token {os.environ["GITHUB_TOKEN"]}' + repo = await fetch_http( + self.session, + f'https://api.github.com/repos/{d["owner"]}/{d["repo"]}', + 'json', + headers=headers, + ) + + embed = Embed( + title=repo['full_name'], + description='No description provided' if repo[ + 'description'] is None else repo['description'], + url=repo['html_url'], + color=0x111111 + ).set_footer( + text=f'Language: {repo["language"]} | ' + + f'Stars: {repo["stargazers_count"]} | ' + + f'Forks: {repo["forks_count"]} | ' + + f'Size: {repo["size"]}kb' + ).set_thumbnail(url=repo['owner']['avatar_url']) + if repo['homepage']: + embed.add_field(name='Website', value=repo['homepage']) + await message.channel.send(embed=embed) + + for gl in GITLAB_RE.finditer(message.content): + d = gl.groupdict() + await orig_to_encode(d) + headers = {} + if 'GITLAB_TOKEN' in os.environ: + headers['PRIVATE-TOKEN'] = os.environ["GITLAB_TOKEN"] + repo = await fetch_http( + self.session, + f'https://gitlab.com/api/v4/projects/{d["owner"]}%2F{d["repo"]}', + 'json', + headers=headers, + ) + + embed = Embed( + title=repo['path_with_namespace'], + description='No description provided' if repo[ + 'description'] == "" else repo['description'], + url=repo['web_url'], + color=0x111111 + 
).set_footer( + text=f'Stars: {repo["star_count"]} | ' + + f'Forks: {repo["forks_count"]}' + ) + + if repo['avatar_url'] is not None: + embed.set_thumbnail(url=repo['avatar_url']) + + await message.channel.send(embed=embed) + + await message.edit(suppress=True) + + +def setup(bot: Bot) -> None: + """Load the Utils cog.""" + bot.add_cog(RepoWidgets(bot)) -- cgit v1.2.3 From 668d96e12acd76c5021ede07401cdb6062b89add Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Sun, 5 Jul 2020 20:49:46 +0200 Subject: Tried to fix some of the flake8 style errors --- bot/cogs/print_snippets.py | 43 +++++++++++++++++-------------------------- bot/cogs/repo_widgets.py | 26 +++++++++----------------- 2 files changed, 26 insertions(+), 43 deletions(-) diff --git a/bot/cogs/print_snippets.py b/bot/cogs/print_snippets.py index 06c9d6cc1..4be3653d5 100644 --- a/bot/cogs/print_snippets.py +++ b/bot/cogs/print_snippets.py @@ -1,24 +1,16 @@ -""" -Cog that prints out snippets to Discord - -Matches each message against a regex and prints the contents -of the first matched snippet url -""" - import os import re import textwrap +import aiohttp from discord import Message from discord.ext.commands import Cog -import aiohttp from bot.bot import Bot -async def fetch_http(session: aiohttp.ClientSession, url: str, response_format='text', **kwargs) -> str: +async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: str, **kwargs) -> str: """Uses aiohttp to make http GET requests""" - async with session.get(url, **kwargs) as response: if response_format == 'text': return await response.text() @@ -28,7 +20,6 @@ async def fetch_http(session: aiohttp.ClientSession, url: str, response_format=' async def revert_to_orig(d: dict) -> dict: """Replace URL Encoded values back to their original""" - for obj in d: if d[obj] is not None: d[obj] = d[obj].replace('%2F', '/').replace('%2E', '.') @@ -36,17 +27,13 @@ async def revert_to_orig(d: dict) -> dict: async def orig_to_encode(d: dict) -> dict: 
"""Encode URL Parameters""" - for obj in d: if d[obj] is not None: d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') async def snippet_to_embed(d: dict, file_contents: str) -> str: - """ - Given a regex groupdict and file contents, creates a code block - """ - + """Given a regex groupdict and file contents, creates a code block""" if d['end_line']: start_line = int(d['start_line']) end_line = int(d['end_line']) @@ -97,19 +84,20 @@ BITBUCKET_RE = re.compile( class PrintSnippets(Cog): - def __init__(self, bot): - """Initializes the cog's bot""" + """ + Cog that prints out snippets to Discord + Matches each message against a regex and prints the contents of all matched snippets + """ + + def __init__(self, bot: Bot): + """Initializes the cog's bot""" self.bot = bot self.session = aiohttp.ClientSession() @Cog.listener() async def on_message(self, message: Message) -> None: - """ - Checks if the message starts is a GitHub snippet, then removes the embed, - then sends the snippet in Discord - """ - + """Checks if the message starts is a GitHub snippet, then removes the embed, then sends the snippet in Discord""" gh_match = GITHUB_RE.search(message.content) gh_gist_match = GITHUB_GIST_RE.search(message.content) gl_match = GITLAB_RE.search(message.content) @@ -125,7 +113,8 @@ class PrintSnippets(Cog): headers['Authorization'] = f'token {os.environ["GITHUB_TOKEN"]}' file_contents = await fetch_http( self.session, - f'https://api.github.com/repos/{d["repo"]}/contents/{d["file_path"]}?ref={d["branch"]}', + f'https://api.github.com/repos/{d["repo"]}\ + /contents/{d["file_path"]}?ref={d["branch"]}', 'text', headers=headers, ) @@ -135,7 +124,8 @@ class PrintSnippets(Cog): d = gh_gist.groupdict() gist_json = await fetch_http( self.session, - f'https://api.github.com/gists/{d["gist_id"]}{"/" + d["revision"] if len(d["revision"]) > 0 else ""}', + f'https://api.github.com/gists/{d["gist_id"]}\ + {"/" + d["revision"] if len(d["revision"]) > 0 else ""}', 'json', ) for f in 
gist_json['files']: @@ -157,7 +147,8 @@ class PrintSnippets(Cog): headers['PRIVATE-TOKEN'] = os.environ["GITLAB_TOKEN"] file_contents = await fetch_http( self.session, - f'https://gitlab.com/api/v4/projects/{d["repo"]}/repository/files/{d["file_path"]}/raw?ref={d["branch"]}', + f'https://gitlab.com/api/v4/projects/{d["repo"]}/\ + repository/files/{d["file_path"]}/raw?ref={d["branch"]}', 'text', headers=headers, ) diff --git a/bot/cogs/repo_widgets.py b/bot/cogs/repo_widgets.py index 70ca387ec..feb931e72 100644 --- a/bot/cogs/repo_widgets.py +++ b/bot/cogs/repo_widgets.py @@ -1,23 +1,15 @@ -""" -Cog that sends pretty embeds of repos - -Matches each message against a regex and prints the contents -of the first matched snippet url -""" - import os import re +import aiohttp from discord import Embed, Message from discord.ext.commands import Cog -import aiohttp from bot.bot import Bot -async def fetch_http(session: aiohttp.ClientSession, url: str, response_format='text', **kwargs) -> str: +async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: str, **kwargs) -> str: """Uses aiohttp to make http GET requests""" - async with session.get(url, **kwargs) as response: if response_format == 'text': return await response.text() @@ -27,7 +19,6 @@ async def fetch_http(session: aiohttp.ClientSession, url: str, response_format=' async def orig_to_encode(d: dict) -> dict: """Encode URL Parameters""" - for obj in d: if d[obj] is not None: d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') @@ -41,19 +32,20 @@ GITLAB_RE = re.compile( class RepoWidgets(Cog): + """ + Cog that sends pretty embeds of repos + + Matches each message against a regex and sends an embed with the details of all referenced repos + """ + def __init__(self, bot: Bot): """Initializes the cog's bot""" - self.bot = bot self.session = aiohttp.ClientSession() @Cog.listener() async def on_message(self, message: Message) -> None: - """ - Checks if the message starts is a GitHub repo link, 
then removes the embed, - then sends a rich embed to Discord - """ - + """Checks if the message starts is a GitHub repo link, then removes the embed, then sends a rich embed to Discord""" gh_match = GITHUB_RE.search(message.content) gl_match = GITLAB_RE.search(message.content) -- cgit v1.2.3 From 2fe46fd372a5c8a69437e3f29c0137cb11d156d9 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Sun, 5 Jul 2020 20:54:55 +0200 Subject: Fixed all docstrings --- bot/cogs/print_snippets.py | 14 +++++++------- bot/cogs/repo_widgets.py | 20 ++++++++++---------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/bot/cogs/print_snippets.py b/bot/cogs/print_snippets.py index 4be3653d5..5c83cd62b 100644 --- a/bot/cogs/print_snippets.py +++ b/bot/cogs/print_snippets.py @@ -10,7 +10,7 @@ from bot.bot import Bot async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: str, **kwargs) -> str: - """Uses aiohttp to make http GET requests""" + """Uses aiohttp to make http GET requests.""" async with session.get(url, **kwargs) as response: if response_format == 'text': return await response.text() @@ -19,21 +19,21 @@ async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: async def revert_to_orig(d: dict) -> dict: - """Replace URL Encoded values back to their original""" + """Replace URL Encoded values back to their original.""" for obj in d: if d[obj] is not None: d[obj] = d[obj].replace('%2F', '/').replace('%2E', '.') async def orig_to_encode(d: dict) -> dict: - """Encode URL Parameters""" + """Encode URL Parameters.""" for obj in d: if d[obj] is not None: d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') async def snippet_to_embed(d: dict, file_contents: str) -> str: - """Given a regex groupdict and file contents, creates a code block""" + """Given a regex groupdict and file contents, creates a code block.""" if d['end_line']: start_line = int(d['start_line']) end_line = int(d['end_line']) @@ -85,9 +85,9 @@ BITBUCKET_RE = 
re.compile( class PrintSnippets(Cog): """ - Cog that prints out snippets to Discord + Cog that prints out snippets to Discord. - Matches each message against a regex and prints the contents of all matched snippets + Matches each message against a regex and prints the contents of all matched snippets. """ def __init__(self, bot: Bot): @@ -97,7 +97,7 @@ class PrintSnippets(Cog): @Cog.listener() async def on_message(self, message: Message) -> None: - """Checks if the message starts is a GitHub snippet, then removes the embed, then sends the snippet in Discord""" + """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" gh_match = GITHUB_RE.search(message.content) gh_gist_match = GITHUB_GIST_RE.search(message.content) gl_match = GITLAB_RE.search(message.content) diff --git a/bot/cogs/repo_widgets.py b/bot/cogs/repo_widgets.py index feb931e72..c8fde7c8e 100644 --- a/bot/cogs/repo_widgets.py +++ b/bot/cogs/repo_widgets.py @@ -9,7 +9,7 @@ from bot.bot import Bot async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: str, **kwargs) -> str: - """Uses aiohttp to make http GET requests""" + """Uses aiohttp to make http GET requests.""" async with session.get(url, **kwargs) as response: if response_format == 'text': return await response.text() @@ -18,7 +18,7 @@ async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: async def orig_to_encode(d: dict) -> dict: - """Encode URL Parameters""" + """Encode URL Parameters.""" for obj in d: if d[obj] is not None: d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') @@ -33,19 +33,19 @@ GITLAB_RE = re.compile( class RepoWidgets(Cog): """ - Cog that sends pretty embeds of repos + Cog that sends pretty embeds of repos. - Matches each message against a regex and sends an embed with the details of all referenced repos + Matches each message against a regex and sends an embed with the details of all referenced repos. 
""" def __init__(self, bot: Bot): - """Initializes the cog's bot""" + """Initializes the cog's bot.""" self.bot = bot self.session = aiohttp.ClientSession() @Cog.listener() async def on_message(self, message: Message) -> None: - """Checks if the message starts is a GitHub repo link, then removes the embed, then sends a rich embed to Discord""" + """Checks if the message has a repo link, removes the embed, then sends a rich embed.""" gh_match = GITHUB_RE.search(message.content) gl_match = GITLAB_RE.search(message.content) @@ -69,10 +69,10 @@ class RepoWidgets(Cog): url=repo['html_url'], color=0x111111 ).set_footer( - text=f'Language: {repo["language"]} | ' + - f'Stars: {repo["stargazers_count"]} | ' + - f'Forks: {repo["forks_count"]} | ' + - f'Size: {repo["size"]}kb' + text=f'Language: {repo["language"]} | ' + + f'Stars: {repo["stargazers_count"]} | ' + + f'Forks: {repo["forks_count"]} | ' + + f'Size: {repo["size"]}kb' ).set_thumbnail(url=repo['owner']['avatar_url']) if repo['homepage']: embed.add_field(name='Website', value=repo['homepage']) -- cgit v1.2.3 From ec3cc1704c7678f6389ac5c0688be90697410bed Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Sun, 5 Jul 2020 20:59:18 +0200 Subject: Minor style fixes --- bot/cogs/print_snippets.py | 2 +- bot/cogs/repo_widgets.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/cogs/print_snippets.py b/bot/cogs/print_snippets.py index 5c83cd62b..67d411a63 100644 --- a/bot/cogs/print_snippets.py +++ b/bot/cogs/print_snippets.py @@ -91,7 +91,7 @@ class PrintSnippets(Cog): """ def __init__(self, bot: Bot): - """Initializes the cog's bot""" + """Initializes the cog's bot.""" self.bot = bot self.session = aiohttp.ClientSession() diff --git a/bot/cogs/repo_widgets.py b/bot/cogs/repo_widgets.py index c8fde7c8e..32c2451df 100644 --- a/bot/cogs/repo_widgets.py +++ b/bot/cogs/repo_widgets.py @@ -98,8 +98,8 @@ class RepoWidgets(Cog): url=repo['web_url'], color=0x111111 ).set_footer( - text=f'Stars: 
{repo["star_count"]} | ' + - f'Forks: {repo["forks_count"]}' + text=f'Stars: {repo["star_count"]} | ' + + f'Forks: {repo["forks_count"]}' ) if repo['avatar_url'] is not None: -- cgit v1.2.3 From fa60e51243c56e6658a91ea63be67a42e22f1512 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 6 Jul 2020 21:23:41 +0200 Subject: Intern `group_names` --- bot/cogs/doc.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index b288a92b1..0975285e8 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -2,6 +2,7 @@ import asyncio import functools import logging import re +import sys import textwrap from collections import OrderedDict from contextlib import suppress @@ -210,7 +211,9 @@ class Doc(commands.Cog): if "/" in symbol: continue # skip unreachable symbols with slashes absolute_doc_url = base_url + relative_doc_url - group_name = group.split(":")[1] + # Intern the group names since they're reused in all the DocItems + # to remove unnecessary memory consumption from them being unique objects + group_name = sys.intern(group.split(":")[1]) if symbol in self.inventories: symbol_base_url = self.inventories[symbol].url.split("/", 3)[2] -- cgit v1.2.3 From 09987afb9b1e39fc5618b4217e1f33860cdd4bb4 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 7 Jul 2020 01:25:14 +0200 Subject: Create method to fetch and create a BeautifulSoup object from an url. Moving this part of the logic into a separate method allows us to put a cache on it, which caches the whole HTML document from the given url, removing the need to do requests to the same URL for every symbol behind it. 
--- bot/cogs/doc.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 0975285e8..71bfcfd4a 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -275,13 +275,9 @@ class Doc(commands.Cog): symbol_info = self.inventories.get(symbol) if symbol_info is None: return None + request_url, symbol_id = symbol_info.url.rsplit('#') - async with self.bot.http_session.get(symbol_info.url) as response: - html = await response.text(encoding='utf-8') - - # Find the signature header and parse the relevant parts. - symbol_id = symbol_info.url.split('#')[-1] - soup = BeautifulSoup(html, 'lxml') + soup = await self._get_soup_from_url(request_url) symbol_heading = soup.find(id=symbol_id) search_html = str(soup) @@ -424,6 +420,15 @@ class Doc(commands.Cog): return text + @async_cache(arg_offset=1) + async def _get_soup_from_url(self, url: str) -> BeautifulSoup: + """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" + log.trace(f"Sending a request to {url}.") + async with self.bot.http_session.get(url) as response: + soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') + soup.find("head").decompose() # the head contains no useful data so we can remove it + return soup + @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol: str) -> None: """Lookup documentation for Python symbols.""" -- cgit v1.2.3 From 8462abaa15e0f9eb7b4f861d0485686ec7470ed0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 7 Jul 2020 01:26:34 +0200 Subject: Use the group attribute instead of checking the symbol name. 
--- bot/cogs/doc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 71bfcfd4a..5ebfb6c25 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -284,7 +284,7 @@ class Doc(commands.Cog): if symbol_heading is None: return None - if symbol_id == f"module-{symbol}": + if symbol_info.group == "module": parsed_module = self.parse_module_symbol(symbol_heading) if parsed_module is None: return [], "" -- cgit v1.2.3 From 5fb1203883a975d752d9c8b803bb8420ef0f7c60 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 7 Jul 2020 19:42:53 +0200 Subject: Removed repo widget prettification and added reaction to remove lines --- bot/__main__.py | 1 - bot/cogs/print_snippets.py | 45 +++++++++--------- bot/cogs/repo_widgets.py | 115 --------------------------------------------- 3 files changed, 22 insertions(+), 139 deletions(-) delete mode 100644 bot/cogs/repo_widgets.py diff --git a/bot/__main__.py b/bot/__main__.py index 1d415eb20..3191faf85 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -72,7 +72,6 @@ bot.load_extension("bot.cogs.watchchannels") bot.load_extension("bot.cogs.webhook_remover") bot.load_extension("bot.cogs.wolfram") bot.load_extension("bot.cogs.print_snippets") -bot.load_extension("bot.cogs.repo_widgets") if constants.HelpChannels.enable: bot.load_extension("bot.cogs.help_channels") diff --git a/bot/cogs/print_snippets.py b/bot/cogs/print_snippets.py index 67d411a63..3f784d2c6 100644 --- a/bot/cogs/print_snippets.py +++ b/bot/cogs/print_snippets.py @@ -1,9 +1,10 @@ +import asyncio import os import re import textwrap import aiohttp -from discord import Message +from discord import Message, Reaction, User from discord.ext.commands import Cog from bot.bot import Bot @@ -113,8 +114,8 @@ class PrintSnippets(Cog): headers['Authorization'] = f'token {os.environ["GITHUB_TOKEN"]}' file_contents = await fetch_http( self.session, - f'https://api.github.com/repos/{d["repo"]}\ - 
/contents/{d["file_path"]}?ref={d["branch"]}', + f'https://api.github.com/repos/{d["repo"]}' + + f'/contents/{d["file_path"]}?ref={d["branch"]}', 'text', headers=headers, ) @@ -124,8 +125,8 @@ class PrintSnippets(Cog): d = gh_gist.groupdict() gist_json = await fetch_http( self.session, - f'https://api.github.com/gists/{d["gist_id"]}\ - {"/" + d["revision"] if len(d["revision"]) > 0 else ""}', + f'https://api.github.com/gists/{d["gist_id"]}' + + f'{"/" + d["revision"] if len(d["revision"]) > 0 else ""}', 'json', ) for f in gist_json['files']: @@ -147,8 +148,8 @@ class PrintSnippets(Cog): headers['PRIVATE-TOKEN'] = os.environ["GITLAB_TOKEN"] file_contents = await fetch_http( self.session, - f'https://gitlab.com/api/v4/projects/{d["repo"]}/\ - repository/files/{d["file_path"]}/raw?ref={d["branch"]}', + f'https://gitlab.com/api/v4/projects/{d["repo"]}/' + + f'repository/files/{d["file_path"]}/raw?ref={d["branch"]}', 'text', headers=headers, ) @@ -168,22 +169,20 @@ class PrintSnippets(Cog): message_to_send = message_to_send[:-1] - if len(message_to_send) > 2000: - await message.channel.send( - 'Sorry, Discord has a 2000 character limit. 
Please send a shorter ' - + 'snippet or split the big snippet up into several smaller ones :slight_smile:' - ) - elif len(message_to_send) == 0: - await message.channel.send( - 'Please send valid snippet links to prevent spam :slight_smile:' - ) - elif message_to_send.count('\n') > 50: - await message.channel.send( - 'Please limit the total number of lines to at most 50 to prevent spam :slight_smile:' - ) - else: - await message.channel.send(message_to_send) - await message.edit(suppress=True) + if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 50: + sent_message = await message.channel.send(message_to_send) + await message.edit(suppress=True) + await sent_message.add_reaction('❌') + + def check(reaction: Reaction, user: User) -> bool: + return user == message.author and str(reaction.emoji) == '❌' + + try: + reaction, user = await self.bot.wait_for('reaction_add', timeout=10.0, check=check) + except asyncio.TimeoutError: + await sent_message.remove_reaction('❌', self.bot.user) + else: + await sent_message.delete() def setup(bot: Bot) -> None: diff --git a/bot/cogs/repo_widgets.py b/bot/cogs/repo_widgets.py deleted file mode 100644 index 32c2451df..000000000 --- a/bot/cogs/repo_widgets.py +++ /dev/null @@ -1,115 +0,0 @@ -import os -import re - -import aiohttp -from discord import Embed, Message -from discord.ext.commands import Cog - -from bot.bot import Bot - - -async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: str, **kwargs) -> str: - """Uses aiohttp to make http GET requests.""" - async with session.get(url, **kwargs) as response: - if response_format == 'text': - return await response.text() - elif response_format == 'json': - return await response.json() - - -async def orig_to_encode(d: dict) -> dict: - """Encode URL Parameters.""" - for obj in d: - if d[obj] is not None: - d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') - - -GITHUB_RE = re.compile( - 
r'https://github\.com/(?P[^/]+?)/(?P[^/]+?)(?:\s|$)') - -GITLAB_RE = re.compile( - r'https://gitlab\.com/(?P[^/]+?)/(?P[^/]+?)(?:\s|$)') - - -class RepoWidgets(Cog): - """ - Cog that sends pretty embeds of repos. - - Matches each message against a regex and sends an embed with the details of all referenced repos. - """ - - def __init__(self, bot: Bot): - """Initializes the cog's bot.""" - self.bot = bot - self.session = aiohttp.ClientSession() - - @Cog.listener() - async def on_message(self, message: Message) -> None: - """Checks if the message has a repo link, removes the embed, then sends a rich embed.""" - gh_match = GITHUB_RE.search(message.content) - gl_match = GITLAB_RE.search(message.content) - - if (gh_match or gl_match) and not message.author.bot: - for gh in GITHUB_RE.finditer(message.content): - d = gh.groupdict() - headers = {} - if 'GITHUB_TOKEN' in os.environ: - headers['Authorization'] = f'token {os.environ["GITHUB_TOKEN"]}' - repo = await fetch_http( - self.session, - f'https://api.github.com/repos/{d["owner"]}/{d["repo"]}', - 'json', - headers=headers, - ) - - embed = Embed( - title=repo['full_name'], - description='No description provided' if repo[ - 'description'] is None else repo['description'], - url=repo['html_url'], - color=0x111111 - ).set_footer( - text=f'Language: {repo["language"]} | ' - + f'Stars: {repo["stargazers_count"]} | ' - + f'Forks: {repo["forks_count"]} | ' - + f'Size: {repo["size"]}kb' - ).set_thumbnail(url=repo['owner']['avatar_url']) - if repo['homepage']: - embed.add_field(name='Website', value=repo['homepage']) - await message.channel.send(embed=embed) - - for gl in GITLAB_RE.finditer(message.content): - d = gl.groupdict() - await orig_to_encode(d) - headers = {} - if 'GITLAB_TOKEN' in os.environ: - headers['PRIVATE-TOKEN'] = os.environ["GITLAB_TOKEN"] - repo = await fetch_http( - self.session, - f'https://gitlab.com/api/v4/projects/{d["owner"]}%2F{d["repo"]}', - 'json', - headers=headers, - ) - - embed = Embed( - 
title=repo['path_with_namespace'], - description='No description provided' if repo[ - 'description'] == "" else repo['description'], - url=repo['web_url'], - color=0x111111 - ).set_footer( - text=f'Stars: {repo["star_count"]} | ' - + f'Forks: {repo["forks_count"]}' - ) - - if repo['avatar_url'] is not None: - embed.set_thumbnail(url=repo['avatar_url']) - - await message.channel.send(embed=embed) - - await message.edit(suppress=True) - - -def setup(bot: Bot) -> None: - """Load the Utils cog.""" - bot.add_cog(RepoWidgets(bot)) -- cgit v1.2.3 From 03dbddfcae35e47d57222343817ea779d6b67ab2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 10 Jul 2020 22:36:19 +0200 Subject: Remove codeblock from symbol embed title. The code block caused the url to not highlight the title text on mobile --- bot/cogs/doc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 5ebfb6c25..e2e3adb4e 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -350,7 +350,7 @@ class Doc(commands.Cog): embed_description += f"\n{description}" embed = discord.Embed( - title=f'`{symbol}`', + title=discord.utils.escape_markdown(symbol), url=permalink, description=embed_description ) -- cgit v1.2.3 From b59e39557ae97ac6bbc4e294651d1fe654bb2d21 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 14 Jul 2020 00:13:42 +0200 Subject: Add doc suffix to doc commands. The `set` command shadowed the `set` symbol, causing the command to seemingly not work. 
A suffix was added to all commands to keep them consistent and future proof; the shorthands were kept unchanged --- bot/cogs/doc.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index e2e3adb4e..7f1fb6135 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -434,7 +434,7 @@ class Doc(commands.Cog): """Lookup documentation for Python symbols.""" await ctx.invoke(self.get_command, symbol=symbol) - @docs_group.command(name='get', aliases=('g',)) + @docs_group.command(name='getdoc', aliases=('g',)) async def get_command(self, ctx: commands.Context, *, symbol: str) -> None: """ Return a documentation embed for a given symbol. @@ -489,7 +489,7 @@ class Doc(commands.Cog): else: await ctx.send(embed=doc_embed) - @docs_group.command(name='set', aliases=('s',)) + @docs_group.command(name='setdoc', aliases=('s',)) @with_role(*MODERATION_ROLES) async def set_command( self, ctx: commands.Context, package_name: ValidPythonIdentifier, @@ -523,7 +523,7 @@ class Doc(commands.Cog): await self.update_single(package_name, base_url, inventory_url) await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") - @docs_group.command(name='delete', aliases=('remove', 'rm', 'd')) + @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @with_role(*MODERATION_ROLES) async def delete_command(self, ctx: commands.Context, package_name: ValidPythonIdentifier) -> None: """ @@ -540,7 +540,7 @@ class Doc(commands.Cog): await self.refresh_inventory() await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") - @docs_group.command(name="refresh", aliases=("rfsh", "r")) + @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) @with_role(*MODERATION_ROLES) async def refresh_command(self, ctx: commands.Context) -> None: """Refresh inventories and send differences to channel.""" -- cgit v1.2.3 From ea0dcabbca10c5fe2afcee2b9451e1494bc069a2 Mon Sep 17 00:00:00 2001 From: 
Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 14 Jul 2020 00:18:58 +0200 Subject: Make the symbol parameter optional. The commands were changed to be greedy, this however made them required arguments breaking the access to the default listing of the available inventories --- bot/cogs/doc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 7f1fb6135..66c4b4ea8 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -430,12 +430,12 @@ class Doc(commands.Cog): return soup @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) - async def docs_group(self, ctx: commands.Context, *, symbol: str) -> None: + async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: """Lookup documentation for Python symbols.""" await ctx.invoke(self.get_command, symbol=symbol) @docs_group.command(name='getdoc', aliases=('g',)) - async def get_command(self, ctx: commands.Context, *, symbol: str) -> None: + async def get_command(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: """ Return a documentation embed for a given symbol. -- cgit v1.2.3 From 40d831fb7b5ca7192fb1bdca8be9157f206eb2bc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 14 Jul 2020 03:40:52 +0200 Subject: Change package name converter to only accept _a-z. Package names are now directly used for stats, where the lowercase a-z characters and _ are used. 
--- bot/cogs/doc.py | 6 +++--- bot/converters.py | 22 ++++++++++------------ 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 66c4b4ea8..09bddb02c 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -21,7 +21,7 @@ from urllib3.exceptions import ProtocolError from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput -from bot.converters import ValidPythonIdentifier, ValidURL +from bot.converters import PackageName, ValidURL from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion @@ -492,7 +492,7 @@ class Doc(commands.Cog): @docs_group.command(name='setdoc', aliases=('s',)) @with_role(*MODERATION_ROLES) async def set_command( - self, ctx: commands.Context, package_name: ValidPythonIdentifier, + self, ctx: commands.Context, package_name: PackageName, base_url: ValidURL, inventory_url: InventoryURL ) -> None: """ @@ -525,7 +525,7 @@ class Doc(commands.Cog): @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @with_role(*MODERATION_ROLES) - async def delete_command(self, ctx: commands.Context, package_name: ValidPythonIdentifier) -> None: + async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None: """ Removes the specified package from the database. diff --git a/bot/converters.py b/bot/converters.py index 72c46fdf0..fac94e9d0 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -34,22 +34,20 @@ def allowed_strings(*values, preserve_case: bool = False) -> t.Callable[[str], s return converter -class ValidPythonIdentifier(Converter): +class PackageName(Converter): """ - A converter that checks whether the given string is a valid Python identifier. + A converter that checks whether the given string is a valid package name. - This is used to have package names that correspond to how you would use the package in your - code, e.g. `import package`. 
- - Raises `BadArgument` if the argument is not a valid Python identifier, and simply passes through - the given argument otherwise. + Package names are used for stats and are restricted to the a-z and _ characters. """ - @staticmethod - async def convert(ctx: Context, argument: str) -> str: - """Checks whether the given string is a valid Python identifier.""" - if not argument.isidentifier(): - raise BadArgument(f"`{argument}` is not a valid Python identifier") + PACKAGE_NAME_RE = re.compile(r"[^a-z_]") + + @classmethod + async def convert(cls, ctx: Context, argument: str) -> str: + """Checks whether the given string is a valid package name.""" + if cls.PACKAGE_NAME_RE.search(argument): + raise BadArgument("The provided package name is not valid, please only use the _ and a-z characters.") return argument -- cgit v1.2.3 From 68805bb77d56f22854508f7912d00bdaab5daf5c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 14 Jul 2020 03:49:18 +0200 Subject: Change docstrings to use suffixed command names. --- bot/cogs/doc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 09bddb02c..673a1156f 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -445,7 +445,7 @@ class Doc(commands.Cog): !docs !docs aiohttp !docs aiohttp.ClientSession - !docs get aiohttp.ClientSession + !docs getdoc aiohttp.ClientSession """ if not symbol: inventory_embed = discord.Embed( @@ -501,7 +501,7 @@ class Doc(commands.Cog): The database will update the object, should an existing item with the specified `package_name` already exist. Example: - !docs set \ + !docs setdoc \ python \ https://docs.python.org/3/ \ https://docs.python.org/3/objects.inv @@ -530,7 +530,7 @@ class Doc(commands.Cog): Removes the specified package from the database. 
Examples: - !docs delete aiohttp + !docs deletedoc aiohttp """ await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') -- cgit v1.2.3 From d1413409f3cbfaaec94060df5c0fea7827fe874b Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 14 Jul 2020 23:54:03 +0200 Subject: Rename inventories to doc_symbols. --- bot/cogs/doc.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py index 673a1156f..526747bf4 100644 --- a/bot/cogs/doc.py +++ b/bot/cogs/doc.py @@ -177,7 +177,7 @@ class Doc(commands.Cog): def __init__(self, bot: Bot): self.base_urls = {} self.bot = bot - self.inventories: Dict[str, DocItem] = {} + self.doc_symbols: Dict[str, DocItem] = {} self.renamed_symbols = set() self.bot.loop.create_task(self.init_refresh_inventory()) @@ -215,20 +215,20 @@ class Doc(commands.Cog): # to remove unnecessary memory consumption from them being unique objects group_name = sys.intern(group.split(":")[1]) - if symbol in self.inventories: - symbol_base_url = self.inventories[symbol].url.split("/", 3)[2] + if symbol in self.doc_symbols: + symbol_base_url = self.doc_symbols[symbol].url.split("/", 3)[2] if ( group_name in NO_OVERRIDE_GROUPS or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) ): symbol = f"{group_name}.{symbol}" - elif (overridden_symbol_group := self.inventories[symbol].group) in NO_OVERRIDE_GROUPS: + elif (overridden_symbol_group := self.doc_symbols[symbol].group) in NO_OVERRIDE_GROUPS: overridden_symbol = f"{overridden_symbol_group}.{symbol}" if overridden_symbol in self.renamed_symbols: overridden_symbol = f"{api_package_name}.{overridden_symbol}" - self.inventories[overridden_symbol] = self.inventories[symbol] + self.doc_symbols[overridden_symbol] = self.doc_symbols[symbol] self.renamed_symbols.add(overridden_symbol) # If renamed `symbol` already exists, add library name in front to differentiate between them. 
@@ -237,7 +237,7 @@ class Doc(commands.Cog): symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) - self.inventories[symbol] = DocItem(api_package_name, absolute_doc_url, group_name) + self.doc_symbols[symbol] = DocItem(api_package_name, absolute_doc_url, group_name) log.trace(f"Fetched inventory for {api_package_name}.") @@ -245,11 +245,11 @@ class Doc(commands.Cog): """Refresh internal documentation inventory.""" log.debug("Refreshing documentation inventory...") - # Clear the old base URLS and inventories to ensure + # Clear the old base URLS and doc symbols to ensure # that we start from a fresh local dataset. # Also, reset the cache used for fetching documentation. self.base_urls.clear() - self.inventories.clear() + self.doc_symbols.clear() self.renamed_symbols.clear() async_cache.cache = OrderedDict() @@ -272,7 +272,7 @@ class Doc(commands.Cog): If the given symbol is a module, returns a tuple `(None, str)` else if the symbol could not be found, returns `None`. """ - symbol_info = self.inventories.get(symbol) + symbol_info = self.doc_symbols.get(symbol) if symbol_info is None: return None request_url, symbol_id = symbol_info.url.rsplit('#') @@ -307,7 +307,7 @@ class Doc(commands.Cog): if scraped_html is None: return None - symbol_obj = self.inventories[symbol] + symbol_obj = self.doc_symbols[symbol] self.bot.stats.incr(f"doc_fetches.{symbol_obj.package.lower()}") signatures = scraped_html[0] permalink = symbol_obj.url -- cgit v1.2.3 From daa46eccc6518e567777240d7b94f121c5eacf57 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 18 Jul 2020 15:52:25 +0200 Subject: Create a package for the Doc cog. 
--- bot/cogs/doc.py | 603 ----------------------------------------------- bot/cogs/doc/__init__.py | 7 + bot/cogs/doc/cog.py | 598 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 605 insertions(+), 603 deletions(-) delete mode 100644 bot/cogs/doc.py create mode 100644 bot/cogs/doc/__init__.py create mode 100644 bot/cogs/doc/cog.py diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py deleted file mode 100644 index 526747bf4..000000000 --- a/bot/cogs/doc.py +++ /dev/null @@ -1,603 +0,0 @@ -import asyncio -import functools -import logging -import re -import sys -import textwrap -from collections import OrderedDict -from contextlib import suppress -from types import SimpleNamespace -from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union -from urllib.parse import urljoin - -import discord -from bs4 import BeautifulSoup -from bs4.element import PageElement, Tag -from discord.ext import commands -from markdownify import MarkdownConverter -from requests import ConnectTimeout, ConnectionError, HTTPError -from sphinx.ext import intersphinx -from urllib3.exceptions import ProtocolError - -from bot.bot import Bot -from bot.constants import MODERATION_ROLES, RedirectOutput -from bot.converters import PackageName, ValidURL -from bot.decorators import with_role -from bot.pagination import LinePaginator -from bot.utils.messages import wait_for_deletion - - -log = logging.getLogger(__name__) -logging.getLogger('urllib3').setLevel(logging.WARNING) - -# Since Intersphinx is intended to be used with Sphinx, -# we need to mock its configuration. 
-SPHINX_MOCK_APP = SimpleNamespace( - config=SimpleNamespace( - intersphinx_timeout=3, - tls_verify=True, - user_agent="python3:python-discord/bot:1.0.0" - ) -) - -NO_OVERRIDE_GROUPS = ( - "2to3fixer", - "token", - "label", - "pdbcommand", - "term", -) -NO_OVERRIDE_PACKAGES = ( - "python", -) - -SEARCH_END_TAG_ATTRS = ( - "data", - "function", - "class", - "exception", - "seealso", - "section", - "rubric", - "sphinxsidebar", -) -UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") -WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") - -FAILED_REQUEST_RETRY_AMOUNT = 3 -NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay - - -class DocItem(NamedTuple): - """Holds inventory symbol information.""" - - package: str - url: str - group: str - - -def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: - """ - LRU cache implementation for coroutines. - - Once the cache exceeds the maximum size, keys are deleted in FIFO order. - - An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key. - """ - # Assign the cache to the function itself so we can clear it from outside. 
- async_cache.cache = OrderedDict() - - def decorator(function: Callable) -> Callable: - """Define the async_cache decorator.""" - @functools.wraps(function) - async def wrapper(*args) -> Any: - """Decorator wrapper for the caching logic.""" - key = ':'.join(args[arg_offset:]) - - value = async_cache.cache.get(key) - if value is None: - if len(async_cache.cache) > max_size: - async_cache.cache.popitem(last=False) - - async_cache.cache[key] = await function(*args) - return async_cache.cache[key] - return wrapper - return decorator - - -class DocMarkdownConverter(MarkdownConverter): - """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" - - def __init__(self, *, page_url: str, **options): - super().__init__(**options) - self.page_url = page_url - - def convert_code(self, el: PageElement, text: str) -> str: - """Undo `markdownify`s underscore escaping.""" - return f"`{text}`".replace('\\', '') - - def convert_pre(self, el: PageElement, text: str) -> str: - """Wrap any codeblocks in `py` for syntax highlighting.""" - code = ''.join(el.strings) - return f"```py\n{code}```" - - def convert_a(self, el: PageElement, text: str) -> str: - """Resolve relative URLs to `self.page_url`.""" - el["href"] = urljoin(self.page_url, el["href"]) - return super().convert_a(el, text) - - def convert_p(self, el: PageElement, text: str) -> str: - """Include only one newline instead of two when the parent is a li tag.""" - parent = el.parent - if parent is not None and parent.name == "li": - return f"{text}\n" - return super().convert_p(el, text) - - -def markdownify(html: str, *, url: str = "") -> str: - """Create a DocMarkdownConverter object from the input html.""" - return DocMarkdownConverter(bullets='•', page_url=url).convert(html) - - -class InventoryURL(commands.Converter): - """ - Represents an Intersphinx inventory URL. - - This converter checks whether intersphinx accepts the given inventory URL, and raises - `BadArgument` if that is not the case. 
- - Otherwise, it simply passes through the given URL. - """ - - @staticmethod - async def convert(ctx: commands.Context, url: str) -> str: - """Convert url to Intersphinx inventory URL.""" - await ctx.trigger_typing() - try: - intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url) - except AttributeError: - raise commands.BadArgument(f"Failed to fetch Intersphinx inventory from URL `{url}`.") - except ConnectionError: - if url.startswith('https'): - raise commands.BadArgument( - f"Cannot establish a connection to `{url}`. Does it support HTTPS?" - ) - raise commands.BadArgument(f"Cannot connect to host with URL `{url}`.") - except ValueError: - raise commands.BadArgument( - f"Failed to read Intersphinx inventory from URL `{url}`. " - "Are you sure that it's a valid inventory file?" - ) - return url - - -class Doc(commands.Cog): - """A set of commands for querying & displaying documentation.""" - - def __init__(self, bot: Bot): - self.base_urls = {} - self.bot = bot - self.doc_symbols: Dict[str, DocItem] = {} - self.renamed_symbols = set() - - self.bot.loop.create_task(self.init_refresh_inventory()) - - async def init_refresh_inventory(self) -> None: - """Refresh documentation inventory on cog initialization.""" - await self.bot.wait_until_guild_available() - await self.refresh_inventory() - - async def update_single( - self, api_package_name: str, base_url: str, inventory_url: str - ) -> None: - """ - Rebuild the inventory for a single package. 
- - Where: - * `package_name` is the package name to use, appears in the log - * `base_url` is the root documentation URL for the specified package, used to build - absolute paths that link to specific symbols - * `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running - `intersphinx.fetch_inventory` in an executor on the bot's event loop - """ - self.base_urls[api_package_name] = base_url - - package = await self._fetch_inventory(inventory_url) - if not package: - return None - - for group, value in package.items(): - for symbol, (_package_name, _version, relative_doc_url, _) in value.items(): - if "/" in symbol: - continue # skip unreachable symbols with slashes - absolute_doc_url = base_url + relative_doc_url - # Intern the group names since they're reused in all the DocItems - # to remove unnecessary memory consumption from them being unique objects - group_name = sys.intern(group.split(":")[1]) - - if symbol in self.doc_symbols: - symbol_base_url = self.doc_symbols[symbol].url.split("/", 3)[2] - if ( - group_name in NO_OVERRIDE_GROUPS - or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) - ): - symbol = f"{group_name}.{symbol}" - - elif (overridden_symbol_group := self.doc_symbols[symbol].group) in NO_OVERRIDE_GROUPS: - overridden_symbol = f"{overridden_symbol_group}.{symbol}" - if overridden_symbol in self.renamed_symbols: - overridden_symbol = f"{api_package_name}.{overridden_symbol}" - - self.doc_symbols[overridden_symbol] = self.doc_symbols[symbol] - self.renamed_symbols.add(overridden_symbol) - - # If renamed `symbol` already exists, add library name in front to differentiate between them. - if symbol in self.renamed_symbols: - # Split `package_name` because of packages like Pillow that have spaces in them. 
- symbol = f"{api_package_name}.{symbol}" - self.renamed_symbols.add(symbol) - - self.doc_symbols[symbol] = DocItem(api_package_name, absolute_doc_url, group_name) - - log.trace(f"Fetched inventory for {api_package_name}.") - - async def refresh_inventory(self) -> None: - """Refresh internal documentation inventory.""" - log.debug("Refreshing documentation inventory...") - - # Clear the old base URLS and doc symbols to ensure - # that we start from a fresh local dataset. - # Also, reset the cache used for fetching documentation. - self.base_urls.clear() - self.doc_symbols.clear() - self.renamed_symbols.clear() - async_cache.cache = OrderedDict() - - # Run all coroutines concurrently - since each of them performs a HTTP - # request, this speeds up fetching the inventory data heavily. - coros = [ - self.update_single( - package["package"], package["base_url"], package["inventory_url"] - ) for package in await self.bot.api_client.get('bot/documentation-links') - ] - await asyncio.gather(*coros) - - async def get_symbol_html(self, symbol: str) -> Optional[Tuple[list, str]]: - """ - Given a Python symbol, return its signature and description. - - The first tuple element is the signature of the given symbol as a markup-free string, and - the second tuple element is the description of the given symbol with HTML markup included. - - If the given symbol is a module, returns a tuple `(None, str)` - else if the symbol could not be found, returns `None`. 
- """ - symbol_info = self.doc_symbols.get(symbol) - if symbol_info is None: - return None - request_url, symbol_id = symbol_info.url.rsplit('#') - - soup = await self._get_soup_from_url(request_url) - symbol_heading = soup.find(id=symbol_id) - search_html = str(soup) - - if symbol_heading is None: - return None - - if symbol_info.group == "module": - parsed_module = self.parse_module_symbol(symbol_heading) - if parsed_module is None: - return [], "" - else: - signatures, description = parsed_module - - else: - signatures, description = self.parse_symbol(symbol_heading, search_html) - - return signatures, description.replace('¶', '') - - @async_cache(arg_offset=1) - async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: - """ - Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. - - If the symbol is known, an Embed with documentation about it is returned. - """ - scraped_html = await self.get_symbol_html(symbol) - if scraped_html is None: - return None - - symbol_obj = self.doc_symbols[symbol] - self.bot.stats.incr(f"doc_fetches.{symbol_obj.package.lower()}") - signatures = scraped_html[0] - permalink = symbol_obj.url - description = markdownify(scraped_html[1], url=permalink) - - # Truncate the description of the embed to the last occurrence - # of a double newline (interpreted as a paragraph) before index 1000. - if len(description) > 1000: - shortened = description[:1000] - description_cutoff = shortened.rfind('\n\n', 100) - if description_cutoff == -1: - # Search the shortened version for cutoff points in decreasing desirability, - # cutoff at 1000 if none are found. - for string in (". 
", ", ", ",", " "): - description_cutoff = shortened.rfind(string) - if description_cutoff != -1: - break - else: - description_cutoff = 1000 - description = description[:description_cutoff] - - # If there is an incomplete code block, cut it out - if description.count("```") % 2: - codeblock_start = description.rfind('```py') - description = description[:codeblock_start].rstrip() - description += f"... [read more]({permalink})" - - description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) - if signatures is None: - # If symbol is a module, don't show signature. - embed_description = description - - elif not signatures: - # It's some "meta-page", for example: - # https://docs.djangoproject.com/en/dev/ref/views/#module-django.views - embed_description = "This appears to be a generic page not tied to a specific symbol." - - else: - embed_description = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) - embed_description += f"\n{description}" - - embed = discord.Embed( - title=discord.utils.escape_markdown(symbol), - url=permalink, - description=embed_description - ) - # Show all symbols with the same name that were renamed in the footer. - embed.set_footer( - text=", ".join(renamed for renamed in self.renamed_symbols - {symbol} if renamed.endswith(f".{symbol}")) - ) - return embed - - @classmethod - def parse_module_symbol(cls, heading: PageElement) -> Optional[Tuple[None, str]]: - """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" - start_tag = heading.find("a", attrs={"class": "headerlink"}) - if start_tag is None: - return None - - description = cls.find_all_children_until_tag(start_tag, cls._match_end_tag) - if description is None: - return None - - return None, description - - @classmethod - def parse_symbol(cls, heading: PageElement, html: str) -> Tuple[List[str], str]: - """ - Parse the signatures and description of a symbol. 
- - Collects up to 3 signatures from dt tags and a description from their sibling dd tag. - """ - signatures = [] - description_element = heading.find_next_sibling("dd") - description_pos = html.find(str(description_element)) - description = cls.find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) - - for element in ( - *reversed(heading.find_previous_siblings("dt", limit=2)), - heading, - *heading.find_next_siblings("dt", limit=2), - )[-3:]: - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) - - if signature and html.find(str(element)) < description_pos: - signatures.append(signature) - - return signatures, description - - @staticmethod - def find_all_children_until_tag( - start_element: PageElement, - tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] - ) -> Optional[str]: - """ - Get all direct children until a child matching `tag_filter` is found. - - `tag_filter` can be either a tuple of string names to check against, - or a filtering callable that's applied to the tags. 
- """ - text = "" - - for element in start_element.find_next().find_next_siblings(): - if isinstance(tag_filter, tuple): - if element.name in tag_filter: - break - elif tag_filter(element): - break - text += str(element) - - return text - - @async_cache(arg_offset=1) - async def _get_soup_from_url(self, url: str) -> BeautifulSoup: - """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" - log.trace(f"Sending a request to {url}.") - async with self.bot.http_session.get(url) as response: - soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') - soup.find("head").decompose() # the head contains no useful data so we can remove it - return soup - - @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) - async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: - """Lookup documentation for Python symbols.""" - await ctx.invoke(self.get_command, symbol=symbol) - - @docs_group.command(name='getdoc', aliases=('g',)) - async def get_command(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: - """ - Return a documentation embed for a given symbol. - - If no symbol is given, return a list of all available inventories. - - Examples: - !docs - !docs aiohttp - !docs aiohttp.ClientSession - !docs getdoc aiohttp.ClientSession - """ - if not symbol: - inventory_embed = discord.Embed( - title=f"All inventories (`{len(self.base_urls)}` total)", - colour=discord.Colour.blue() - ) - - lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items()) - if self.base_urls: - await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False) - - else: - inventory_embed.description = "Hmmm, seems like there's nothing here yet." 
- await ctx.send(embed=inventory_embed) - - else: - symbol = symbol.strip("`") - # Fetching documentation for a symbol (at least for the first time, since - # caching is used) takes quite some time, so let's send typing to indicate - # that we got the command, but are still working on it. - async with ctx.typing(): - doc_embed = await self.get_symbol_embed(symbol) - - if doc_embed is None: - symbol = await discord.ext.commands.clean_content().convert(ctx, symbol) - error_embed = discord.Embed( - description=f"Sorry, I could not find any documentation for `{(symbol)}`.", - colour=discord.Colour.red() - ) - error_message = await ctx.send(embed=error_embed) - await wait_for_deletion( - error_message, - (ctx.author.id,), - timeout=NOT_FOUND_DELETE_DELAY, - client=self.bot - ) - with suppress(discord.NotFound): - await ctx.message.delete() - with suppress(discord.NotFound): - await error_message.delete() - else: - await ctx.send(embed=doc_embed) - - @docs_group.command(name='setdoc', aliases=('s',)) - @with_role(*MODERATION_ROLES) - async def set_command( - self, ctx: commands.Context, package_name: PackageName, - base_url: ValidURL, inventory_url: InventoryURL - ) -> None: - """ - Adds a new documentation metadata object to the site's database. - - The database will update the object, should an existing item with the specified `package_name` already exist. 
- - Example: - !docs setdoc \ - python \ - https://docs.python.org/3/ \ - https://docs.python.org/3/objects.inv - """ - body = { - 'package': package_name, - 'base_url': base_url, - 'inventory_url': inventory_url - } - await self.bot.api_client.post('bot/documentation-links', json=body) - - log.info( - f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" - f"Package name: {package_name}\n" - f"Base url: {base_url}\n" - f"Inventory URL: {inventory_url}" - ) - - await self.update_single(package_name, base_url, inventory_url) - await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") - - @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) - @with_role(*MODERATION_ROLES) - async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None: - """ - Removes the specified package from the database. - - Examples: - !docs deletedoc aiohttp - """ - await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') - - async with ctx.typing(): - # Rebuild the inventory to ensure that everything - # that was from this package is properly deleted. 
- await self.refresh_inventory() - await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") - - @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) - @with_role(*MODERATION_ROLES) - async def refresh_command(self, ctx: commands.Context) -> None: - """Refresh inventories and send differences to channel.""" - old_inventories = set(self.base_urls) - with ctx.typing(): - await self.refresh_inventory() - new_inventories = set(self.base_urls) - - if added := ", ".join(new_inventories - old_inventories): - added = "+ " + added - - if removed := ", ".join(old_inventories - new_inventories): - removed = "- " + removed - - embed = discord.Embed( - title="Inventories refreshed", - description=f"```diff\n{added}\n{removed}```" if added or removed else "" - ) - await ctx.send(embed=embed) - - async def _fetch_inventory(self, inventory_url: str) -> Optional[dict]: - """Get and return inventory from `inventory_url`. If fetching fails, return None.""" - fetch_func = functools.partial(intersphinx.fetch_inventory, SPHINX_MOCK_APP, '', inventory_url) - for retry in range(1, FAILED_REQUEST_RETRY_AMOUNT+1): - try: - package = await self.bot.loop.run_in_executor(None, fetch_func) - except ConnectTimeout: - log.error( - f"Fetching of inventory {inventory_url} timed out," - f" trying again. ({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except ProtocolError: - log.error( - f"Connection lost while fetching inventory {inventory_url}," - f" trying again. 
({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except HTTPError as e: - log.error(f"Fetching of inventory {inventory_url} failed with status code {e.response.status_code}.") - return None - except ConnectionError: - log.error(f"Couldn't establish connection to inventory {inventory_url}.") - return None - else: - return package - log.error(f"Fetching of inventory {inventory_url} failed.") - return None - - @staticmethod - def _match_end_tag(tag: Tag) -> bool: - """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" - for attr in SEARCH_END_TAG_ATTRS: - if attr in tag.get("class", ()): - return True - - return tag.name == "table" - - -def setup(bot: Bot) -> None: - """Load the Doc cog.""" - bot.add_cog(Doc(bot)) diff --git a/bot/cogs/doc/__init__.py b/bot/cogs/doc/__init__.py new file mode 100644 index 000000000..19a71ee66 --- /dev/null +++ b/bot/cogs/doc/__init__.py @@ -0,0 +1,7 @@ +from bot.bot import Bot +from .cog import DocCog + + +def setup(bot: Bot) -> None: + """Load the Doc cog.""" + bot.add_cog(DocCog(bot)) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py new file mode 100644 index 000000000..463e4ebc6 --- /dev/null +++ b/bot/cogs/doc/cog.py @@ -0,0 +1,598 @@ +import asyncio +import functools +import logging +import re +import sys +import textwrap +from collections import OrderedDict +from contextlib import suppress +from types import SimpleNamespace +from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union +from urllib.parse import urljoin + +import discord +from bs4 import BeautifulSoup +from bs4.element import PageElement, Tag +from discord.ext import commands +from markdownify import MarkdownConverter +from requests import ConnectTimeout, ConnectionError, HTTPError +from sphinx.ext import intersphinx +from urllib3.exceptions import ProtocolError + +from bot.bot import Bot +from bot.constants import MODERATION_ROLES, RedirectOutput +from bot.converters import PackageName, ValidURL +from 
bot.decorators import with_role +from bot.pagination import LinePaginator +from bot.utils.messages import wait_for_deletion + + +log = logging.getLogger(__name__) +logging.getLogger('urllib3').setLevel(logging.WARNING) + +# Since Intersphinx is intended to be used with Sphinx, +# we need to mock its configuration. +SPHINX_MOCK_APP = SimpleNamespace( + config=SimpleNamespace( + intersphinx_timeout=3, + tls_verify=True, + user_agent="python3:python-discord/bot:1.0.0" + ) +) + +NO_OVERRIDE_GROUPS = ( + "2to3fixer", + "token", + "label", + "pdbcommand", + "term", +) +NO_OVERRIDE_PACKAGES = ( + "python", +) + +SEARCH_END_TAG_ATTRS = ( + "data", + "function", + "class", + "exception", + "seealso", + "section", + "rubric", + "sphinxsidebar", +) +UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") +WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") + +FAILED_REQUEST_RETRY_AMOUNT = 3 +NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay + + +class DocItem(NamedTuple): + """Holds inventory symbol information.""" + + package: str + url: str + group: str + + +def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: + """ + LRU cache implementation for coroutines. + + Once the cache exceeds the maximum size, keys are deleted in FIFO order. + + An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key. + """ + # Assign the cache to the function itself so we can clear it from outside. 
+ async_cache.cache = OrderedDict() + + def decorator(function: Callable) -> Callable: + """Define the async_cache decorator.""" + @functools.wraps(function) + async def wrapper(*args) -> Any: + """Decorator wrapper for the caching logic.""" + key = ':'.join(args[arg_offset:]) + + value = async_cache.cache.get(key) + if value is None: + if len(async_cache.cache) > max_size: + async_cache.cache.popitem(last=False) + + async_cache.cache[key] = await function(*args) + return async_cache.cache[key] + return wrapper + return decorator + + +class DocMarkdownConverter(MarkdownConverter): + """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" + + def __init__(self, *, page_url: str, **options): + super().__init__(**options) + self.page_url = page_url + + def convert_code(self, el: PageElement, text: str) -> str: + """Undo `markdownify`s underscore escaping.""" + return f"`{text}`".replace('\\', '') + + def convert_pre(self, el: PageElement, text: str) -> str: + """Wrap any codeblocks in `py` for syntax highlighting.""" + code = ''.join(el.strings) + return f"```py\n{code}```" + + def convert_a(self, el: PageElement, text: str) -> str: + """Resolve relative URLs to `self.page_url`.""" + el["href"] = urljoin(self.page_url, el["href"]) + return super().convert_a(el, text) + + def convert_p(self, el: PageElement, text: str) -> str: + """Include only one newline instead of two when the parent is a li tag.""" + parent = el.parent + if parent is not None and parent.name == "li": + return f"{text}\n" + return super().convert_p(el, text) + + +def markdownify(html: str, *, url: str = "") -> str: + """Create a DocMarkdownConverter object from the input html.""" + return DocMarkdownConverter(bullets='•', page_url=url).convert(html) + + +class InventoryURL(commands.Converter): + """ + Represents an Intersphinx inventory URL. + + This converter checks whether intersphinx accepts the given inventory URL, and raises + `BadArgument` if that is not the case. 
+ + Otherwise, it simply passes through the given URL. + """ + + @staticmethod + async def convert(ctx: commands.Context, url: str) -> str: + """Convert url to Intersphinx inventory URL.""" + await ctx.trigger_typing() + try: + intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url) + except AttributeError: + raise commands.BadArgument(f"Failed to fetch Intersphinx inventory from URL `{url}`.") + except ConnectionError: + if url.startswith('https'): + raise commands.BadArgument( + f"Cannot establish a connection to `{url}`. Does it support HTTPS?" + ) + raise commands.BadArgument(f"Cannot connect to host with URL `{url}`.") + except ValueError: + raise commands.BadArgument( + f"Failed to read Intersphinx inventory from URL `{url}`. " + "Are you sure that it's a valid inventory file?" + ) + return url + + +class DocCog(commands.Cog): + """A set of commands for querying & displaying documentation.""" + + def __init__(self, bot: Bot): + self.base_urls = {} + self.bot = bot + self.doc_symbols: Dict[str, DocItem] = {} + self.renamed_symbols = set() + + self.bot.loop.create_task(self.init_refresh_inventory()) + + async def init_refresh_inventory(self) -> None: + """Refresh documentation inventory on cog initialization.""" + await self.bot.wait_until_guild_available() + await self.refresh_inventory() + + async def update_single( + self, api_package_name: str, base_url: str, inventory_url: str + ) -> None: + """ + Rebuild the inventory for a single package. 
+ + Where: + * `package_name` is the package name to use, appears in the log + * `base_url` is the root documentation URL for the specified package, used to build + absolute paths that link to specific symbols + * `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running + `intersphinx.fetch_inventory` in an executor on the bot's event loop + """ + self.base_urls[api_package_name] = base_url + + package = await self._fetch_inventory(inventory_url) + if not package: + return None + + for group, value in package.items(): + for symbol, (_package_name, _version, relative_doc_url, _) in value.items(): + if "/" in symbol: + continue # skip unreachable symbols with slashes + absolute_doc_url = base_url + relative_doc_url + # Intern the group names since they're reused in all the DocItems + # to remove unnecessary memory consumption from them being unique objects + group_name = sys.intern(group.split(":")[1]) + + if symbol in self.doc_symbols: + symbol_base_url = self.doc_symbols[symbol].url.split("/", 3)[2] + if ( + group_name in NO_OVERRIDE_GROUPS + or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) + ): + symbol = f"{group_name}.{symbol}" + + elif (overridden_symbol_group := self.doc_symbols[symbol].group) in NO_OVERRIDE_GROUPS: + overridden_symbol = f"{overridden_symbol_group}.{symbol}" + if overridden_symbol in self.renamed_symbols: + overridden_symbol = f"{api_package_name}.{overridden_symbol}" + + self.doc_symbols[overridden_symbol] = self.doc_symbols[symbol] + self.renamed_symbols.add(overridden_symbol) + + # If renamed `symbol` already exists, add library name in front to differentiate between them. + if symbol in self.renamed_symbols: + # Split `package_name` because of packages like Pillow that have spaces in them. 
+ symbol = f"{api_package_name}.{symbol}" + self.renamed_symbols.add(symbol) + + self.doc_symbols[symbol] = DocItem(api_package_name, absolute_doc_url, group_name) + + log.trace(f"Fetched inventory for {api_package_name}.") + + async def refresh_inventory(self) -> None: + """Refresh internal documentation inventory.""" + log.debug("Refreshing documentation inventory...") + + # Clear the old base URLS and doc symbols to ensure + # that we start from a fresh local dataset. + # Also, reset the cache used for fetching documentation. + self.base_urls.clear() + self.doc_symbols.clear() + self.renamed_symbols.clear() + async_cache.cache = OrderedDict() + + # Run all coroutines concurrently - since each of them performs a HTTP + # request, this speeds up fetching the inventory data heavily. + coros = [ + self.update_single( + package["package"], package["base_url"], package["inventory_url"] + ) for package in await self.bot.api_client.get('bot/documentation-links') + ] + await asyncio.gather(*coros) + + async def get_symbol_html(self, symbol: str) -> Optional[Tuple[list, str]]: + """ + Given a Python symbol, return its signature and description. + + The first tuple element is the signature of the given symbol as a markup-free string, and + the second tuple element is the description of the given symbol with HTML markup included. + + If the given symbol is a module, returns a tuple `(None, str)` + else if the symbol could not be found, returns `None`. 
+ """ + symbol_info = self.doc_symbols.get(symbol) + if symbol_info is None: + return None + request_url, symbol_id = symbol_info.url.rsplit('#') + + soup = await self._get_soup_from_url(request_url) + symbol_heading = soup.find(id=symbol_id) + search_html = str(soup) + + if symbol_heading is None: + return None + + if symbol_info.group == "module": + parsed_module = self.parse_module_symbol(symbol_heading) + if parsed_module is None: + return [], "" + else: + signatures, description = parsed_module + + else: + signatures, description = self.parse_symbol(symbol_heading, search_html) + + return signatures, description.replace('¶', '') + + @async_cache(arg_offset=1) + async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: + """ + Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. + + If the symbol is known, an Embed with documentation about it is returned. + """ + scraped_html = await self.get_symbol_html(symbol) + if scraped_html is None: + return None + + symbol_obj = self.doc_symbols[symbol] + self.bot.stats.incr(f"doc_fetches.{symbol_obj.package.lower()}") + signatures = scraped_html[0] + permalink = symbol_obj.url + description = markdownify(scraped_html[1], url=permalink) + + # Truncate the description of the embed to the last occurrence + # of a double newline (interpreted as a paragraph) before index 1000. + if len(description) > 1000: + shortened = description[:1000] + description_cutoff = shortened.rfind('\n\n', 100) + if description_cutoff == -1: + # Search the shortened version for cutoff points in decreasing desirability, + # cutoff at 1000 if none are found. + for string in (". 
", ", ", ",", " "): + description_cutoff = shortened.rfind(string) + if description_cutoff != -1: + break + else: + description_cutoff = 1000 + description = description[:description_cutoff] + + # If there is an incomplete code block, cut it out + if description.count("```") % 2: + codeblock_start = description.rfind('```py') + description = description[:codeblock_start].rstrip() + description += f"... [read more]({permalink})" + + description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) + if signatures is None: + # If symbol is a module, don't show signature. + embed_description = description + + elif not signatures: + # It's some "meta-page", for example: + # https://docs.djangoproject.com/en/dev/ref/views/#module-django.views + embed_description = "This appears to be a generic page not tied to a specific symbol." + + else: + embed_description = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) + embed_description += f"\n{description}" + + embed = discord.Embed( + title=discord.utils.escape_markdown(symbol), + url=permalink, + description=embed_description + ) + # Show all symbols with the same name that were renamed in the footer. + embed.set_footer( + text=", ".join(renamed for renamed in self.renamed_symbols - {symbol} if renamed.endswith(f".{symbol}")) + ) + return embed + + @classmethod + def parse_module_symbol(cls, heading: PageElement) -> Optional[Tuple[None, str]]: + """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" + start_tag = heading.find("a", attrs={"class": "headerlink"}) + if start_tag is None: + return None + + description = cls.find_all_children_until_tag(start_tag, cls._match_end_tag) + if description is None: + return None + + return None, description + + @classmethod + def parse_symbol(cls, heading: PageElement, html: str) -> Tuple[List[str], str]: + """ + Parse the signatures and description of a symbol. 
+ + Collects up to 3 signatures from dt tags and a description from their sibling dd tag. + """ + signatures = [] + description_element = heading.find_next_sibling("dd") + description_pos = html.find(str(description_element)) + description = cls.find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) + + for element in ( + *reversed(heading.find_previous_siblings("dt", limit=2)), + heading, + *heading.find_next_siblings("dt", limit=2), + )[-3:]: + signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) + + if signature and html.find(str(element)) < description_pos: + signatures.append(signature) + + return signatures, description + + @staticmethod + def find_all_children_until_tag( + start_element: PageElement, + tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] + ) -> Optional[str]: + """ + Get all direct children until a child matching `tag_filter` is found. + + `tag_filter` can be either a tuple of string names to check against, + or a filtering callable that's applied to the tags. 
+ """ + text = "" + + for element in start_element.find_next().find_next_siblings(): + if isinstance(tag_filter, tuple): + if element.name in tag_filter: + break + elif tag_filter(element): + break + text += str(element) + + return text + + @async_cache(arg_offset=1) + async def _get_soup_from_url(self, url: str) -> BeautifulSoup: + """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" + log.trace(f"Sending a request to {url}.") + async with self.bot.http_session.get(url) as response: + soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') + soup.find("head").decompose() # the head contains no useful data so we can remove it + return soup + + @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) + async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: + """Lookup documentation for Python symbols.""" + await ctx.invoke(self.get_command, symbol=symbol) + + @docs_group.command(name='getdoc', aliases=('g',)) + async def get_command(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: + """ + Return a documentation embed for a given symbol. + + If no symbol is given, return a list of all available inventories. + + Examples: + !docs + !docs aiohttp + !docs aiohttp.ClientSession + !docs getdoc aiohttp.ClientSession + """ + if not symbol: + inventory_embed = discord.Embed( + title=f"All inventories (`{len(self.base_urls)}` total)", + colour=discord.Colour.blue() + ) + + lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items()) + if self.base_urls: + await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False) + + else: + inventory_embed.description = "Hmmm, seems like there's nothing here yet." 
+ await ctx.send(embed=inventory_embed) + + else: + symbol = symbol.strip("`") + # Fetching documentation for a symbol (at least for the first time, since + # caching is used) takes quite some time, so let's send typing to indicate + # that we got the command, but are still working on it. + async with ctx.typing(): + doc_embed = await self.get_symbol_embed(symbol) + + if doc_embed is None: + symbol = await discord.ext.commands.clean_content().convert(ctx, symbol) + error_embed = discord.Embed( + description=f"Sorry, I could not find any documentation for `{(symbol)}`.", + colour=discord.Colour.red() + ) + error_message = await ctx.send(embed=error_embed) + await wait_for_deletion( + error_message, + (ctx.author.id,), + timeout=NOT_FOUND_DELETE_DELAY, + client=self.bot + ) + with suppress(discord.NotFound): + await ctx.message.delete() + with suppress(discord.NotFound): + await error_message.delete() + else: + await ctx.send(embed=doc_embed) + + @docs_group.command(name='setdoc', aliases=('s',)) + @with_role(*MODERATION_ROLES) + async def set_command( + self, ctx: commands.Context, package_name: PackageName, + base_url: ValidURL, inventory_url: InventoryURL + ) -> None: + """ + Adds a new documentation metadata object to the site's database. + + The database will update the object, should an existing item with the specified `package_name` already exist. 
+ + Example: + !docs setdoc \ + python \ + https://docs.python.org/3/ \ + https://docs.python.org/3/objects.inv + """ + body = { + 'package': package_name, + 'base_url': base_url, + 'inventory_url': inventory_url + } + await self.bot.api_client.post('bot/documentation-links', json=body) + + log.info( + f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" + f"Package name: {package_name}\n" + f"Base url: {base_url}\n" + f"Inventory URL: {inventory_url}" + ) + + await self.update_single(package_name, base_url, inventory_url) + await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") + + @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) + @with_role(*MODERATION_ROLES) + async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None: + """ + Removes the specified package from the database. + + Examples: + !docs deletedoc aiohttp + """ + await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') + + async with ctx.typing(): + # Rebuild the inventory to ensure that everything + # that was from this package is properly deleted. 
+ await self.refresh_inventory() + await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") + + @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) + @with_role(*MODERATION_ROLES) + async def refresh_command(self, ctx: commands.Context) -> None: + """Refresh inventories and send differences to channel.""" + old_inventories = set(self.base_urls) + with ctx.typing(): + await self.refresh_inventory() + new_inventories = set(self.base_urls) + + if added := ", ".join(new_inventories - old_inventories): + added = "+ " + added + + if removed := ", ".join(old_inventories - new_inventories): + removed = "- " + removed + + embed = discord.Embed( + title="Inventories refreshed", + description=f"```diff\n{added}\n{removed}```" if added or removed else "" + ) + await ctx.send(embed=embed) + + async def _fetch_inventory(self, inventory_url: str) -> Optional[dict]: + """Get and return inventory from `inventory_url`. If fetching fails, return None.""" + fetch_func = functools.partial(intersphinx.fetch_inventory, SPHINX_MOCK_APP, '', inventory_url) + for retry in range(1, FAILED_REQUEST_RETRY_AMOUNT+1): + try: + package = await self.bot.loop.run_in_executor(None, fetch_func) + except ConnectTimeout: + log.error( + f"Fetching of inventory {inventory_url} timed out," + f" trying again. ({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" + ) + except ProtocolError: + log.error( + f"Connection lost while fetching inventory {inventory_url}," + f" trying again. 
({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" + ) + except HTTPError as e: + log.error(f"Fetching of inventory {inventory_url} failed with status code {e.response.status_code}.") + return None + except ConnectionError: + log.error(f"Couldn't establish connection to inventory {inventory_url}.") + return None + else: + return package + log.error(f"Fetching of inventory {inventory_url} failed.") + return None + + @staticmethod + def _match_end_tag(tag: Tag) -> bool: + """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" + for attr in SEARCH_END_TAG_ATTRS: + if attr in tag.get("class", ()): + return True + + return tag.name == "table" -- cgit v1.2.3 From c3bda11a10e3706d7e457f727e57e6a92f604d1e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 18 Jul 2020 16:16:49 +0200 Subject: Move async_cache into a separate module --- bot/cogs/doc/cache.py | 32 ++++++++++++++++++++++++++++++++ bot/cogs/doc/cog.py | 33 ++------------------------------- 2 files changed, 34 insertions(+), 31 deletions(-) create mode 100644 bot/cogs/doc/cache.py diff --git a/bot/cogs/doc/cache.py b/bot/cogs/doc/cache.py new file mode 100644 index 000000000..9da2a1dab --- /dev/null +++ b/bot/cogs/doc/cache.py @@ -0,0 +1,32 @@ +import functools +from collections import OrderedDict +from typing import Any, Callable + + +def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: + """ + LRU cache implementation for coroutines. + + Once the cache exceeds the maximum size, keys are deleted in FIFO order. + + An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key. + """ + # Assign the cache to the function itself so we can clear it from outside. 
+ async_cache.cache = OrderedDict() + + def decorator(function: Callable) -> Callable: + """Define the async_cache decorator.""" + @functools.wraps(function) + async def wrapper(*args) -> Any: + """Decorator wrapper for the caching logic.""" + key = ':'.join(args[arg_offset:]) + + value = async_cache.cache.get(key) + if value is None: + if len(async_cache.cache) > max_size: + async_cache.cache.popitem(last=False) + + async_cache.cache[key] = await function(*args) + return async_cache.cache[key] + return wrapper + return decorator diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 463e4ebc6..2627951e8 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -7,7 +7,7 @@ import textwrap from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union +from typing import Callable, Dict, List, NamedTuple, Optional, Tuple, Union from urllib.parse import urljoin import discord @@ -25,7 +25,7 @@ from bot.converters import PackageName, ValidURL from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion - +from .cache import async_cache log = logging.getLogger(__name__) logging.getLogger('urllib3').setLevel(logging.WARNING) @@ -76,35 +76,6 @@ class DocItem(NamedTuple): group: str -def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: - """ - LRU cache implementation for coroutines. - - Once the cache exceeds the maximum size, keys are deleted in FIFO order. - - An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key. - """ - # Assign the cache to the function itself so we can clear it from outside. 
- async_cache.cache = OrderedDict() - - def decorator(function: Callable) -> Callable: - """Define the async_cache decorator.""" - @functools.wraps(function) - async def wrapper(*args) -> Any: - """Decorator wrapper for the caching logic.""" - key = ':'.join(args[arg_offset:]) - - value = async_cache.cache.get(key) - if value is None: - if len(async_cache.cache) > max_size: - async_cache.cache.popitem(last=False) - - async_cache.cache[key] = await function(*args) - return async_cache.cache[key] - return wrapper - return decorator - - class DocMarkdownConverter(MarkdownConverter): """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" -- cgit v1.2.3 From 53213ec69208370342498cdc417f3c90d35b8f3e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 18 Jul 2020 16:37:19 +0200 Subject: Move main parsing methods into a new module --- bot/cogs/doc/cog.py | 102 +++---------------------------------------------- bot/cogs/doc/parser.py | 102 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 108 insertions(+), 96 deletions(-) create mode 100644 bot/cogs/doc/parser.py diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 2627951e8..4a275c7c6 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -7,12 +7,11 @@ import textwrap from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Callable, Dict, List, NamedTuple, Optional, Tuple, Union +from typing import Dict, NamedTuple, Optional, Tuple from urllib.parse import urljoin import discord -from bs4 import BeautifulSoup -from bs4.element import PageElement, Tag +from bs4.element import PageElement from discord.ext import commands from markdownify import MarkdownConverter from requests import ConnectTimeout, ConnectionError, HTTPError @@ -26,6 +25,7 @@ from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import 
wait_for_deletion from .cache import async_cache +from .parser import get_soup_from_url, parse_module_symbol, parse_symbol log = logging.getLogger(__name__) logging.getLogger('urllib3').setLevel(logging.WARNING) @@ -51,19 +51,7 @@ NO_OVERRIDE_PACKAGES = ( "python", ) -SEARCH_END_TAG_ATTRS = ( - "data", - "function", - "class", - "exception", - "seealso", - "section", - "rubric", - "sphinxsidebar", -) -UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") - FAILED_REQUEST_RETRY_AMOUNT = 3 NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay @@ -248,7 +236,7 @@ class DocCog(commands.Cog): return None request_url, symbol_id = symbol_info.url.rsplit('#') - soup = await self._get_soup_from_url(request_url) + soup = await get_soup_from_url(self.bot.http_session, request_url) symbol_heading = soup.find(id=symbol_id) search_html = str(soup) @@ -256,14 +244,14 @@ class DocCog(commands.Cog): return None if symbol_info.group == "module": - parsed_module = self.parse_module_symbol(symbol_heading) + parsed_module = parse_module_symbol(symbol_heading) if parsed_module is None: return [], "" else: signatures, description = parsed_module else: - signatures, description = self.parse_symbol(symbol_heading, search_html) + signatures, description = parse_symbol(symbol_heading, search_html) return signatures, description.replace('¶', '') @@ -331,75 +319,6 @@ class DocCog(commands.Cog): ) return embed - @classmethod - def parse_module_symbol(cls, heading: PageElement) -> Optional[Tuple[None, str]]: - """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" - start_tag = heading.find("a", attrs={"class": "headerlink"}) - if start_tag is None: - return None - - description = cls.find_all_children_until_tag(start_tag, cls._match_end_tag) - if description is None: - return None - - return None, description - - @classmethod - def parse_symbol(cls, heading: PageElement, 
html: str) -> Tuple[List[str], str]: - """ - Parse the signatures and description of a symbol. - - Collects up to 3 signatures from dt tags and a description from their sibling dd tag. - """ - signatures = [] - description_element = heading.find_next_sibling("dd") - description_pos = html.find(str(description_element)) - description = cls.find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) - - for element in ( - *reversed(heading.find_previous_siblings("dt", limit=2)), - heading, - *heading.find_next_siblings("dt", limit=2), - )[-3:]: - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) - - if signature and html.find(str(element)) < description_pos: - signatures.append(signature) - - return signatures, description - - @staticmethod - def find_all_children_until_tag( - start_element: PageElement, - tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] - ) -> Optional[str]: - """ - Get all direct children until a child matching `tag_filter` is found. - - `tag_filter` can be either a tuple of string names to check against, - or a filtering callable that's applied to the tags. 
- """ - text = "" - - for element in start_element.find_next().find_next_siblings(): - if isinstance(tag_filter, tuple): - if element.name in tag_filter: - break - elif tag_filter(element): - break - text += str(element) - - return text - - @async_cache(arg_offset=1) - async def _get_soup_from_url(self, url: str) -> BeautifulSoup: - """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" - log.trace(f"Sending a request to {url}.") - async with self.bot.http_session.get(url) as response: - soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') - soup.find("head").decompose() # the head contains no useful data so we can remove it - return soup - @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: """Lookup documentation for Python symbols.""" @@ -558,12 +477,3 @@ class DocCog(commands.Cog): return package log.error(f"Fetching of inventory {inventory_url} failed.") return None - - @staticmethod - def _match_end_tag(tag: Tag) -> bool: - """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" - for attr in SEARCH_END_TAG_ATTRS: - if attr in tag.get("class", ()): - return True - - return tag.name == "table" diff --git a/bot/cogs/doc/parser.py b/bot/cogs/doc/parser.py new file mode 100644 index 000000000..67621591b --- /dev/null +++ b/bot/cogs/doc/parser.py @@ -0,0 +1,102 @@ +import logging +import re +from typing import Callable, List, Optional, Tuple, Union + +from aiohttp import ClientSession +from bs4 import BeautifulSoup +from bs4.element import PageElement, Tag + +from .cache import async_cache + +log = logging.getLogger(__name__) + +UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") +SEARCH_END_TAG_ATTRS = ( + "data", + "function", + "class", + "exception", + "seealso", + "section", + "rubric", + "sphinxsidebar", +) + + +def parse_module_symbol(heading: 
PageElement) -> Optional[Tuple[None, str]]: + """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" + start_tag = heading.find("a", attrs={"class": "headerlink"}) + if start_tag is None: + return None + + description = find_all_children_until_tag(start_tag, _match_end_tag) + if description is None: + return None + + return None, description + + +def parse_symbol(heading: PageElement, html: str) -> Tuple[List[str], str]: + """ + Parse the signatures and description of a symbol. + + Collects up to 3 signatures from dt tags and a description from their sibling dd tag. + """ + signatures = [] + description_element = heading.find_next_sibling("dd") + description_pos = html.find(str(description_element)) + description = find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) + + for element in ( + *reversed(heading.find_previous_siblings("dt", limit=2)), + heading, + *heading.find_next_siblings("dt", limit=2), + )[-3:]: + signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) + + if signature and html.find(str(element)) < description_pos: + signatures.append(signature) + + return signatures, description + + +def find_all_children_until_tag( + start_element: PageElement, + tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] +) -> Optional[str]: + """ + Get all direct children until a child matching `tag_filter` is found. + + `tag_filter` can be either a tuple of string names to check against, + or a filtering callable that's applied to the tags. 
+ """ + text = "" + + for element in start_element.find_next().find_next_siblings(): + if isinstance(tag_filter, tuple): + if element.name in tag_filter: + break + elif tag_filter(element): + break + text += str(element) + + return text + + +@async_cache(arg_offset=1) +async def get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: + """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" + log.trace(f"Sending a request to {url}.") + async with http_session.get(url) as response: + soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') + soup.find("head").decompose() # the head contains no useful data so we can remove it + return soup + + +def _match_end_tag(tag: Tag) -> bool: + """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" + for attr in SEARCH_END_TAG_ATTRS: + if attr in tag.get("class", ()): + return True + + return tag.name == "table" -- cgit v1.2.3 From eb8361d7fa9d0eb0dd5982c6df0fd35b80d40ba6 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 19 Jul 2020 03:13:02 +0200 Subject: Move markdown truncation into parser module --- bot/cogs/doc/cog.py | 27 ++------------------------- bot/cogs/doc/parser.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 25 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 4a275c7c6..bd4e9d4d1 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -25,7 +25,7 @@ from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion from .cache import async_cache -from .parser import get_soup_from_url, parse_module_symbol, parse_symbol +from .parser import get_soup_from_url, parse_module_symbol, parse_symbol, truncate_markdown log = logging.getLogger(__name__) logging.getLogger('urllib3').setLevel(logging.WARNING) @@ -270,30 +270,7 @@ class DocCog(commands.Cog): 
self.bot.stats.incr(f"doc_fetches.{symbol_obj.package.lower()}") signatures = scraped_html[0] permalink = symbol_obj.url - description = markdownify(scraped_html[1], url=permalink) - - # Truncate the description of the embed to the last occurrence - # of a double newline (interpreted as a paragraph) before index 1000. - if len(description) > 1000: - shortened = description[:1000] - description_cutoff = shortened.rfind('\n\n', 100) - if description_cutoff == -1: - # Search the shortened version for cutoff points in decreasing desirability, - # cutoff at 1000 if none are found. - for string in (". ", ", ", ",", " "): - description_cutoff = shortened.rfind(string) - if description_cutoff != -1: - break - else: - description_cutoff = 1000 - description = description[:description_cutoff] - - # If there is an incomplete code block, cut it out - if description.count("```") % 2: - codeblock_start = description.rfind('```py') - description = description[:codeblock_start].rstrip() - description += f"... [read more]({permalink})" - + description = truncate_markdown(markdownify(scraped_html[1], url=permalink), permalink, 1000) description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is None: # If symbol is a module, don't show signature. diff --git a/bot/cogs/doc/parser.py b/bot/cogs/doc/parser.py index 67621591b..010826a96 100644 --- a/bot/cogs/doc/parser.py +++ b/bot/cogs/doc/parser.py @@ -83,6 +83,35 @@ def find_all_children_until_tag( return text +def truncate_markdown(markdown: str, permalink: str, max_length: int) -> str: + """ + Truncate `markdown` to be at most `max_length` characters. + + The markdown string is searched for substrings to cut at, to keep its structure, + but if none are found the string is simply sliced. 
+ """ + if len(markdown) > max_length: + shortened = markdown[:max_length] + description_cutoff = shortened.rfind('\n\n', 100) + if description_cutoff == -1: + # Search the shortened version for cutoff points in decreasing desirability, + # cutoff at 1000 if none are found. + for string in (". ", ", ", ",", " "): + description_cutoff = shortened.rfind(string) + if description_cutoff != -1: + break + else: + description_cutoff = max_length + markdown = markdown[:description_cutoff] + + # If there is an incomplete code block, cut it out + if markdown.count("```") % 2: + codeblock_start = markdown.rfind('```py') + markdown = markdown[:codeblock_start].rstrip() + markdown += f"... [read more]({permalink})" + return markdown + + @async_cache(arg_offset=1) async def get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" -- cgit v1.2.3 From 0f8b991fffce8b808bf25f1ad9ed710bb1ff4919 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 20 Jul 2020 02:24:19 +0200 Subject: Rename parser.py to parsing.py. Parser is a stdlib module name, a rename avoids shadowing it. 
--- bot/cogs/doc/cog.py | 2 +- bot/cogs/doc/parser.py | 131 ------------------------------------------------ bot/cogs/doc/parsing.py | 131 ++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 132 insertions(+), 132 deletions(-) delete mode 100644 bot/cogs/doc/parser.py create mode 100644 bot/cogs/doc/parsing.py diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index bd4e9d4d1..4e4f3b737 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -25,7 +25,7 @@ from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion from .cache import async_cache -from .parser import get_soup_from_url, parse_module_symbol, parse_symbol, truncate_markdown +from .parsing import get_soup_from_url, parse_module_symbol, parse_symbol, truncate_markdown log = logging.getLogger(__name__) logging.getLogger('urllib3').setLevel(logging.WARNING) diff --git a/bot/cogs/doc/parser.py b/bot/cogs/doc/parser.py deleted file mode 100644 index 010826a96..000000000 --- a/bot/cogs/doc/parser.py +++ /dev/null @@ -1,131 +0,0 @@ -import logging -import re -from typing import Callable, List, Optional, Tuple, Union - -from aiohttp import ClientSession -from bs4 import BeautifulSoup -from bs4.element import PageElement, Tag - -from .cache import async_cache - -log = logging.getLogger(__name__) - -UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") -SEARCH_END_TAG_ATTRS = ( - "data", - "function", - "class", - "exception", - "seealso", - "section", - "rubric", - "sphinxsidebar", -) - - -def parse_module_symbol(heading: PageElement) -> Optional[Tuple[None, str]]: - """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" - start_tag = heading.find("a", attrs={"class": "headerlink"}) - if start_tag is None: - return None - - description = find_all_children_until_tag(start_tag, _match_end_tag) - if description is None: - return None - - return None, description - 
- -def parse_symbol(heading: PageElement, html: str) -> Tuple[List[str], str]: - """ - Parse the signatures and description of a symbol. - - Collects up to 3 signatures from dt tags and a description from their sibling dd tag. - """ - signatures = [] - description_element = heading.find_next_sibling("dd") - description_pos = html.find(str(description_element)) - description = find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) - - for element in ( - *reversed(heading.find_previous_siblings("dt", limit=2)), - heading, - *heading.find_next_siblings("dt", limit=2), - )[-3:]: - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) - - if signature and html.find(str(element)) < description_pos: - signatures.append(signature) - - return signatures, description - - -def find_all_children_until_tag( - start_element: PageElement, - tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] -) -> Optional[str]: - """ - Get all direct children until a child matching `tag_filter` is found. - - `tag_filter` can be either a tuple of string names to check against, - or a filtering callable that's applied to the tags. - """ - text = "" - - for element in start_element.find_next().find_next_siblings(): - if isinstance(tag_filter, tuple): - if element.name in tag_filter: - break - elif tag_filter(element): - break - text += str(element) - - return text - - -def truncate_markdown(markdown: str, permalink: str, max_length: int) -> str: - """ - Truncate `markdown` to be at most `max_length` characters. - - The markdown string is searched for substrings to cut at, to keep its structure, - but if none are found the string is simply sliced. - """ - if len(markdown) > max_length: - shortened = markdown[:max_length] - description_cutoff = shortened.rfind('\n\n', 100) - if description_cutoff == -1: - # Search the shortened version for cutoff points in decreasing desirability, - # cutoff at 1000 if none are found. - for string in (". 
", ", ", ",", " "): - description_cutoff = shortened.rfind(string) - if description_cutoff != -1: - break - else: - description_cutoff = max_length - markdown = markdown[:description_cutoff] - - # If there is an incomplete code block, cut it out - if markdown.count("```") % 2: - codeblock_start = markdown.rfind('```py') - markdown = markdown[:codeblock_start].rstrip() - markdown += f"... [read more]({permalink})" - return markdown - - -@async_cache(arg_offset=1) -async def get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: - """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" - log.trace(f"Sending a request to {url}.") - async with http_session.get(url) as response: - soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') - soup.find("head").decompose() # the head contains no useful data so we can remove it - return soup - - -def _match_end_tag(tag: Tag) -> bool: - """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" - for attr in SEARCH_END_TAG_ATTRS: - if attr in tag.get("class", ()): - return True - - return tag.name == "table" diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py new file mode 100644 index 000000000..010826a96 --- /dev/null +++ b/bot/cogs/doc/parsing.py @@ -0,0 +1,131 @@ +import logging +import re +from typing import Callable, List, Optional, Tuple, Union + +from aiohttp import ClientSession +from bs4 import BeautifulSoup +from bs4.element import PageElement, Tag + +from .cache import async_cache + +log = logging.getLogger(__name__) + +UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") +SEARCH_END_TAG_ATTRS = ( + "data", + "function", + "class", + "exception", + "seealso", + "section", + "rubric", + "sphinxsidebar", +) + + +def parse_module_symbol(heading: PageElement) -> Optional[Tuple[None, str]]: + """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" + 
start_tag = heading.find("a", attrs={"class": "headerlink"}) + if start_tag is None: + return None + + description = find_all_children_until_tag(start_tag, _match_end_tag) + if description is None: + return None + + return None, description + + +def parse_symbol(heading: PageElement, html: str) -> Tuple[List[str], str]: + """ + Parse the signatures and description of a symbol. + + Collects up to 3 signatures from dt tags and a description from their sibling dd tag. + """ + signatures = [] + description_element = heading.find_next_sibling("dd") + description_pos = html.find(str(description_element)) + description = find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) + + for element in ( + *reversed(heading.find_previous_siblings("dt", limit=2)), + heading, + *heading.find_next_siblings("dt", limit=2), + )[-3:]: + signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) + + if signature and html.find(str(element)) < description_pos: + signatures.append(signature) + + return signatures, description + + +def find_all_children_until_tag( + start_element: PageElement, + tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] +) -> Optional[str]: + """ + Get all direct children until a child matching `tag_filter` is found. + + `tag_filter` can be either a tuple of string names to check against, + or a filtering callable that's applied to the tags. + """ + text = "" + + for element in start_element.find_next().find_next_siblings(): + if isinstance(tag_filter, tuple): + if element.name in tag_filter: + break + elif tag_filter(element): + break + text += str(element) + + return text + + +def truncate_markdown(markdown: str, permalink: str, max_length: int) -> str: + """ + Truncate `markdown` to be at most `max_length` characters. + + The markdown string is searched for substrings to cut at, to keep its structure, + but if none are found the string is simply sliced. 
+ """ + if len(markdown) > max_length: + shortened = markdown[:max_length] + description_cutoff = shortened.rfind('\n\n', 100) + if description_cutoff == -1: + # Search the shortened version for cutoff points in decreasing desirability, + # cutoff at 1000 if none are found. + for string in (". ", ", ", ",", " "): + description_cutoff = shortened.rfind(string) + if description_cutoff != -1: + break + else: + description_cutoff = max_length + markdown = markdown[:description_cutoff] + + # If there is an incomplete code block, cut it out + if markdown.count("```") % 2: + codeblock_start = markdown.rfind('```py') + markdown = markdown[:codeblock_start].rstrip() + markdown += f"... [read more]({permalink})" + return markdown + + +@async_cache(arg_offset=1) +async def get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: + """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" + log.trace(f"Sending a request to {url}.") + async with http_session.get(url) as response: + soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') + soup.find("head").decompose() # the head contains no useful data so we can remove it + return soup + + +def _match_end_tag(tag: Tag) -> bool: + """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" + for attr in SEARCH_END_TAG_ATTRS: + if attr in tag.get("class", ()): + return True + + return tag.name == "table" -- cgit v1.2.3 From 4560f0f89b52cfcb8b18abeb1efa707c334a86d4 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 20 Jul 2020 02:28:25 +0200 Subject: Remove permalink from truncated markdown. The permalink serves no functional purpose in the embed, as it is already included in the title. But it does add the complexity of passing in the url to the parser. 
--- bot/cogs/doc/cog.py | 2 +- bot/cogs/doc/parsing.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 4e4f3b737..36fbe9010 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -270,7 +270,7 @@ class DocCog(commands.Cog): self.bot.stats.incr(f"doc_fetches.{symbol_obj.package.lower()}") signatures = scraped_html[0] permalink = symbol_obj.url - description = truncate_markdown(markdownify(scraped_html[1], url=permalink), permalink, 1000) + description = truncate_markdown(markdownify(scraped_html[1], url=permalink), 1000) description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is None: # If symbol is a module, don't show signature. diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 010826a96..3b79e0a93 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -83,7 +83,7 @@ def find_all_children_until_tag( return text -def truncate_markdown(markdown: str, permalink: str, max_length: int) -> str: +def truncate_markdown(markdown: str, max_length: int) -> str: """ Truncate `markdown` to be at most `max_length` characters. @@ -108,7 +108,7 @@ def truncate_markdown(markdown: str, permalink: str, max_length: int) -> str: if markdown.count("```") % 2: codeblock_start = markdown.rfind('```py') markdown = markdown[:codeblock_start].rstrip() - markdown += f"... [read more]({permalink})" + markdown += "... read more" return markdown -- cgit v1.2.3 From cecd2c8e320a2a0ff0095cd1fa197552d43c6684 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 20 Jul 2020 02:31:56 +0200 Subject: Simplify cutoff text. "read more" seemed out of place with no permalink over it. 
--- bot/cogs/doc/parsing.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 3b79e0a93..994124e92 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -1,5 +1,6 @@ import logging import re +import string from typing import Callable, List, Optional, Tuple, Union from aiohttp import ClientSession @@ -96,8 +97,8 @@ def truncate_markdown(markdown: str, max_length: int) -> str: if description_cutoff == -1: # Search the shortened version for cutoff points in decreasing desirability, # cutoff at 1000 if none are found. - for string in (". ", ", ", ",", " "): - description_cutoff = shortened.rfind(string) + for cutoff_string in (". ", ", ", ",", " "): + description_cutoff = shortened.rfind(cutoff_string) if description_cutoff != -1: break else: @@ -108,7 +109,7 @@ def truncate_markdown(markdown: str, max_length: int) -> str: if markdown.count("```") % 2: codeblock_start = markdown.rfind('```py') markdown = markdown[:codeblock_start].rstrip() - markdown += "... read more" + markdown = markdown.rstrip(string.punctuation) + "..." return markdown -- cgit v1.2.3 From 2b24579b49ced873e05e375051bbbb4ec2855b12 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 20 Jul 2020 03:55:31 +0200 Subject: Add function for finding tags until a matching tag This will allow flexibility in the future when collecting tags for the description and signature of symbols. The base is a function which accepts a callable which is called and iterated over, but 3 names with a partial function that has the callable supplied are provided to keep the outside interface neater. 
--- bot/cogs/doc/parsing.py | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 994124e92..5e5a5be66 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -1,6 +1,7 @@ import logging import re import string +from functools import partial from typing import Callable, List, Optional, Tuple, Union from aiohttp import ClientSession @@ -24,6 +25,40 @@ SEARCH_END_TAG_ATTRS = ( ) +def find_elements_until_tag( + start_element: PageElement, + tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], + *, + func: Callable, + limit: int = None, +) -> List[str]: + """ + Get all tags until a tag matching `tag_filter` is found. + + `tag_filter` can be either a tuple of string names to check against, + or a filtering t.Callable that's applied to the tags. + + `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`. + That method is then iterated over and all tags until the matching tag are added to the return list as strings. 
+ """ + elements = [] + + for element in func(start_element, limit=limit): + if isinstance(tag_filter, tuple): + if element.name in tag_filter: + break + elif tag_filter(element): + break + elements.append(str(element)) + + return elements + + +find_next_children_until_tag = partial(find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False)) +find_next_siblings_until_tag = partial(find_elements_until_tag, func=BeautifulSoup.find_next_siblings) +find_previous_siblings_until_tag = partial(find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) + + def parse_module_symbol(heading: PageElement) -> Optional[Tuple[None, str]]: """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" start_tag = heading.find("a", attrs={"class": "headerlink"}) -- cgit v1.2.3 From 9f78dbafc3bc532bbfb5ffa0ef110fdeb0c3e8a5 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 20 Jul 2020 03:57:27 +0200 Subject: Simplify module parsing method. Instead of returning None and multiple values, the method now only returns the string of the description. Previously the parsing returned None and quit when appropriate tags for shortening the description were not found, but the new implementation simply defaults to the provided start tag if a better alternative is not found. 
--- bot/cogs/doc/parsing.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 5e5a5be66..368feeb68 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -59,17 +59,18 @@ find_next_siblings_until_tag = partial(find_elements_until_tag, func=BeautifulSo find_previous_siblings_until_tag = partial(find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) -def parse_module_symbol(heading: PageElement) -> Optional[Tuple[None, str]]: - """Get page content from the headerlink up to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`.""" - start_tag = heading.find("a", attrs={"class": "headerlink"}) - if start_tag is None: - return None +def get_module_description(start_element: PageElement) -> Optional[str]: + """ + Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. - description = find_all_children_until_tag(start_tag, _match_end_tag) - if description is None: - return None + A headerlink a tag is attempted to be found to skip repeating the module name in the description, + if it's found it's used as the tag to search from instead of the `start_element`. + """ + header = start_element.find("a", attrs={"class": "headerlink"}) + start_tag = header.parent if header is not None else start_element + description = "".join(str(tag) for tag in find_next_siblings_until_tag(start_tag, _match_end_tag)) - return None, description + return description def parse_symbol(heading: PageElement, html: str) -> Tuple[List[str], str]: -- cgit v1.2.3 From 082867253cd19c70516102a3d4972da6d501ff6f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 20 Jul 2020 17:35:07 +0200 Subject: Create a function for collecting signatures. 
By getting the signatures without the description we get more flexibility of parsing different symbol groups and decouple the logic from the description which can be parsed directly with the new `find_elements_until_tag` based function. --- bot/cogs/doc/parsing.py | 46 ++++++++++------------------------------------ 1 file changed, 10 insertions(+), 36 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 368feeb68..5b60f1609 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -73,51 +73,25 @@ def get_module_description(start_element: PageElement) -> Optional[str]: return description -def parse_symbol(heading: PageElement, html: str) -> Tuple[List[str], str]: +def get_signatures(start_signature: PageElement) -> List[str]: """ - Parse the signatures and description of a symbol. + Collect up to 3 signatures from dt tags around the `start_signature` dt tag. - Collects up to 3 signatures from dt tags and a description from their sibling dd tag. + First the signatures under the `start_signature` are included; + if less than 2 are found, tags above the start signature are added to the result if any are present. 
""" signatures = [] - description_element = heading.find_next_sibling("dd") - description_pos = html.find(str(description_element)) - description = find_all_children_until_tag(description_element, tag_filter=("dt", "dl")) - for element in ( - *reversed(heading.find_previous_siblings("dt", limit=2)), - heading, - *heading.find_next_siblings("dt", limit=2), + *reversed(find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)), + start_signature, + *find_next_siblings_until_tag(start_signature, ("dd",), limit=2), )[-3:]: - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) + signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element) - if signature and html.find(str(element)) < description_pos: + if signature: signatures.append(signature) - return signatures, description - - -def find_all_children_until_tag( - start_element: PageElement, - tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]] -) -> Optional[str]: - """ - Get all direct children until a child matching `tag_filter` is found. - - `tag_filter` can be either a tuple of string names to check against, - or a filtering callable that's applied to the tags. - """ - text = "" - - for element in start_element.find_next().find_next_siblings(): - if isinstance(tag_filter, tuple): - if element.name in tag_filter: - break - elif tag_filter(element): - break - text += str(element) - - return text + return signatures def truncate_markdown(markdown: str, max_length: int) -> str: -- cgit v1.2.3 From caedfb0c16bc98eb94d723caff42dfe0799f8f17 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 01:38:00 +0200 Subject: Remove conversion to str when finding elements. The tags need to be processed down the line, which is not viable on strings. 
---
 bot/cogs/doc/parsing.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py
index 5b60f1609..acf3a0804 100644
--- a/bot/cogs/doc/parsing.py
+++ b/bot/cogs/doc/parsing.py
@@ -31,7 +31,7 @@ def find_elements_until_tag(
     *,
     func: Callable,
     limit: int = None,
-) -> List[str]:
+) -> List[Tag]:
     """
     Get all tags until a tag matching `tag_filter` is found.
 
@@ -49,7 +49,7 @@ def find_elements_until_tag(
             break
         elif tag_filter(element):
             break
-        elements.append(str(element))
+        elements.append(element)
 
     return elements
 
-- 
cgit v1.2.3


From 1c997846f282f76d17700f0f16c0a0abb5c49a30 Mon Sep 17 00:00:00 2001
From: Numerlor <25886452+Numerlor@users.noreply.github.com>
Date: Wed, 22 Jul 2020 01:39:43 +0200
Subject: Fix handling of elements when fetching signatures.

After the change to `find_elements_until_tag`, the text contents need to be
extracted from the tags instead of passing them directly to re.

---
 bot/cogs/doc/parsing.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py
index acf3a0804..725fe47cd 100644
--- a/bot/cogs/doc/parsing.py
+++ b/bot/cogs/doc/parsing.py
@@ -86,7 +86,7 @@ def get_signatures(start_signature: PageElement) -> List[str]:
         start_signature,
         *find_next_siblings_until_tag(start_signature, ("dd",), limit=2),
     )[-3:]:
-        signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element)
+        signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text)
 
         if signature:
             signatures.append(signature)
 
-- 
cgit v1.2.3


From e10def8a3d79dffd8cc53acd6b30fa43741d140c Mon Sep 17 00:00:00 2001
From: Numerlor <25886452+Numerlor@users.noreply.github.com>
Date: Wed, 22 Jul 2020 02:03:31 +0200
Subject: Move DocMarkdownConverter to parsing.
--- bot/cogs/doc/cog.py | 34 ---------------------------------- bot/cogs/doc/parsing.py | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 36fbe9010..a7dcd9020 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -64,40 +64,6 @@ class DocItem(NamedTuple): group: str -class DocMarkdownConverter(MarkdownConverter): - """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" - - def __init__(self, *, page_url: str, **options): - super().__init__(**options) - self.page_url = page_url - - def convert_code(self, el: PageElement, text: str) -> str: - """Undo `markdownify`s underscore escaping.""" - return f"`{text}`".replace('\\', '') - - def convert_pre(self, el: PageElement, text: str) -> str: - """Wrap any codeblocks in `py` for syntax highlighting.""" - code = ''.join(el.strings) - return f"```py\n{code}```" - - def convert_a(self, el: PageElement, text: str) -> str: - """Resolve relative URLs to `self.page_url`.""" - el["href"] = urljoin(self.page_url, el["href"]) - return super().convert_a(el, text) - - def convert_p(self, el: PageElement, text: str) -> str: - """Include only one newline instead of two when the parent is a li tag.""" - parent = el.parent - if parent is not None and parent.name == "li": - return f"{text}\n" - return super().convert_p(el, text) - - -def markdownify(html: str, *, url: str = "") -> str: - """Create a DocMarkdownConverter object from the input html.""" - return DocMarkdownConverter(bullets='•', page_url=url).convert(html) - - class InventoryURL(commands.Converter): """ Represents an Intersphinx inventory URL. 
diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 725fe47cd..8f6688bd2 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -25,6 +25,40 @@ SEARCH_END_TAG_ATTRS = ( ) +class DocMarkdownConverter(MarkdownConverter): + """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" + + def __init__(self, *, page_url: str, **options): + super().__init__(**options) + self.page_url = page_url + + def convert_code(self, el: PageElement, text: str) -> str: + """Undo `markdownify`s underscore escaping.""" + return f"`{text}`".replace('\\', '') + + def convert_pre(self, el: PageElement, text: str) -> str: + """Wrap any codeblocks in `py` for syntax highlighting.""" + code = ''.join(el.strings) + return f"```py\n{code}```" + + def convert_a(self, el: PageElement, text: str) -> str: + """Resolve relative URLs to `self.page_url`.""" + el["href"] = urljoin(self.page_url, el["href"]) + return super().convert_a(el, text) + + def convert_p(self, el: PageElement, text: str) -> str: + """Include only one newline instead of two when the parent is a li tag.""" + parent = el.parent + if parent is not None and parent.name == "li": + return f"{text}\n" + return super().convert_p(el, text) + + +def markdownify(html: str, *, url: str = "") -> str: + """Create a DocMarkdownConverter object from the input html.""" + return DocMarkdownConverter(bullets='•', page_url=url).convert(html) + + def find_elements_until_tag( start_element: PageElement, tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], -- cgit v1.2.3 From 6795a7f05e3720f375a9195182b996a14d754ea0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 02:06:50 +0200 Subject: Fix ordered list indices in markdown converter. markdownify relies on the parent tag's index method, which goes through all of its contents, if there is anything else in the contents apart from the li tags, those indices are then shifted. 
--- bot/cogs/doc/parsing.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 8f6688bd2..25001b83d 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -3,10 +3,12 @@ import re import string from functools import partial from typing import Callable, List, Optional, Tuple, Union +from urllib.parse import urljoin from aiohttp import ClientSession from bs4 import BeautifulSoup from bs4.element import PageElement, Tag +from markdownify import MarkdownConverter from .cache import async_cache @@ -32,6 +34,22 @@ class DocMarkdownConverter(MarkdownConverter): super().__init__(**options) self.page_url = page_url + def convert_li(self, el: PageElement, text: str) -> str: + """Fix markdownify's erroneous indexing in ol tags.""" + parent = el.parent + if parent is not None and parent.name == 'ol': + li_tags = parent.find_all("li") + bullet = '%s.' % (li_tags.index(el)+1) + else: + depth = -1 + while el: + if el.name == 'ul': + depth += 1 + el = el.parent + bullets = self.options['bullets'] + bullet = bullets[depth % len(bullets)] + return '%s %s\n' % (bullet, text or '') + def convert_code(self, el: PageElement, text: str) -> str: """Undo `markdownify`s underscore escaping.""" return f"`{text}`".replace('\\', '') -- cgit v1.2.3 From 4e9ffb210f6a8f0184ac97cb16703777cc1e0ca0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 02:34:11 +0200 Subject: Create a function for getting the result markdown. 
--- bot/cogs/doc/parsing.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 25001b83d..8756e0694 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -1,6 +1,7 @@ import logging import re import string +import textwrap from functools import partial from typing import Callable, List, Optional, Tuple, Union from urllib.parse import urljoin @@ -15,6 +16,8 @@ from .cache import async_cache log = logging.getLogger(__name__) UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") +WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") + SEARCH_END_TAG_ATTRS = ( "data", "function", @@ -175,6 +178,24 @@ def truncate_markdown(markdown: str, max_length: int) -> str: return markdown +def _parse_into_markdown(signatures: Optional[List[str]], description: str, url: str) -> str: + """ + Create a markdown string with the signatures at the top, and the converted html description below them. + + The signatures are wrapped in python codeblocks, separated from the description by a newline. + The result string is truncated to be max 1000 symbols long. + """ + description = truncate_markdown(markdownify(description, url=url), 1000) + description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) + if signatures is not None: + formatted_markdown = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) + else: + formatted_markdown = "" + formatted_markdown += f"\n{description}" + + return formatted_markdown + + @async_cache(arg_offset=1) async def get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" -- cgit v1.2.3 From f562c4b4551caa8ed3710ac5e9841150cb8a2492 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 02:35:13 +0200 Subject: Create the parsing interface function. 
Other functions from the module are not intended to be used directly, with the interface of it being the added function which accepts the symbol and calls internals. All other names except imports and log had the underscore prefix added to accommodate this. --- bot/cogs/doc/parsing.py | 92 ++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 71 insertions(+), 21 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 8756e0694..a2c6564b3 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -3,7 +3,7 @@ import re import string import textwrap from functools import partial -from typing import Callable, List, Optional, Tuple, Union +from typing import Callable, List, Optional, TYPE_CHECKING, Tuple, Union from urllib.parse import urljoin from aiohttp import ClientSession @@ -12,13 +12,15 @@ from bs4.element import PageElement, Tag from markdownify import MarkdownConverter from .cache import async_cache +if TYPE_CHECKING: + from .cog import DocItem log = logging.getLogger(__name__) -UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") -WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") +_UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") +_WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") -SEARCH_END_TAG_ATTRS = ( +_SEARCH_END_TAG_ATTRS = ( "data", "function", "class", @@ -29,8 +31,17 @@ SEARCH_END_TAG_ATTRS = ( "sphinxsidebar", ) +_NO_SIGNATURE_GROUPS = { + "attribute", + "envvar", + "setting", + "tempaltefilter", + "templatetag", + "term", +} -class DocMarkdownConverter(MarkdownConverter): + +class _DocMarkdownConverter(MarkdownConverter): """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" def __init__(self, *, page_url: str, **options): @@ -75,12 +86,12 @@ class DocMarkdownConverter(MarkdownConverter): return super().convert_p(el, text) -def markdownify(html: str, *, url: str = "") -> str: +def _markdownify(html: str, *, url: str = "") 
-> str: """Create a DocMarkdownConverter object from the input html.""" - return DocMarkdownConverter(bullets='•', page_url=url).convert(html) + return _DocMarkdownConverter(bullets='•', page_url=url).convert(html) -def find_elements_until_tag( +def _find_elements_until_tag( start_element: PageElement, tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], *, @@ -109,9 +120,9 @@ def find_elements_until_tag( return elements -find_next_children_until_tag = partial(find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False)) -find_next_siblings_until_tag = partial(find_elements_until_tag, func=BeautifulSoup.find_next_siblings) -find_previous_siblings_until_tag = partial(find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) +_find_next_children_until_tag = partial(_find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False)) +_find_next_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_next_siblings) +_find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) def get_module_description(start_element: PageElement) -> Optional[str]: @@ -123,12 +134,19 @@ def get_module_description(start_element: PageElement) -> Optional[str]: """ header = start_element.find("a", attrs={"class": "headerlink"}) start_tag = header.parent if header is not None else start_element - description = "".join(str(tag) for tag in find_next_siblings_until_tag(start_tag, _match_end_tag)) + description = "".join(str(tag) for tag in _find_next_siblings_until_tag(start_tag, _match_end_tag)) return description -def get_signatures(start_signature: PageElement) -> List[str]: +def _get_symbol_description(symbol: PageElement) -> str: + """Get the string contents of the next dd tag, up to a dt or a dl tag.""" + description_tag = symbol.find_next("dd") + description_contents = _find_next_children_until_tag(description_tag, ("dt", "dl")) + return "".join(str(tag) for tag in 
description_contents) + + +def _get_signatures(start_signature: PageElement) -> List[str]: """ Collect up to 3 signatures from dt tags around the `start_signature` dt tag. @@ -137,11 +155,11 @@ def get_signatures(start_signature: PageElement) -> List[str]: """ signatures = [] for element in ( - *reversed(find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)), + *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)), start_signature, - *find_next_siblings_until_tag(start_signature, ("dd",), limit=2), + *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2), )[-3:]: - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) + signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) if signature: signatures.append(signature) @@ -149,7 +167,7 @@ def get_signatures(start_signature: PageElement) -> List[str]: return signatures -def truncate_markdown(markdown: str, max_length: int) -> str: +def _truncate_markdown(markdown: str, max_length: int) -> str: """ Truncate `markdown` to be at most `max_length` characters. @@ -185,8 +203,8 @@ def _parse_into_markdown(signatures: Optional[List[str]], description: str, url: The signatures are wrapped in python codeblocks, separated from the description by a newline. The result string is truncated to be max 1000 symbols long. 
""" - description = truncate_markdown(markdownify(description, url=url), 1000) - description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) + description = _truncate_markdown(_markdownify(description, url=url), 1000) + description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is not None: formatted_markdown = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) else: @@ -197,7 +215,7 @@ def _parse_into_markdown(signatures: Optional[List[str]], description: str, url: @async_cache(arg_offset=1) -async def get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: +async def _get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" log.trace(f"Sending a request to {url}.") async with http_session.get(url) as response: @@ -208,8 +226,40 @@ async def get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulS def _match_end_tag(tag: Tag) -> bool: """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" - for attr in SEARCH_END_TAG_ATTRS: + for attr in _SEARCH_END_TAG_ATTRS: if attr in tag.get("class", ()): return True return tag.name == "table" + + +async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem") -> str: + """ + Return parsed markdown of the passed symbol, truncated to 1000 characters. + + A request through `http_session` is made to the url associated with `symbol_data` for the html contents; + the contents are then parsed depending on what group the symbol belongs to. 
+ """ + if "#" in symbol_data.url: + request_url, symbol_id = symbol_data.url.rsplit('#') + else: + request_url = symbol_data.url + symbol_id = None + + soup = await _get_soup_from_url(http_session, request_url) + symbol_heading = soup.find(id=symbol_id) + + # Handle doc symbols as modules, because they either link to the page of a module, + # or don't contain any useful info to be parsed. + signature = None + if symbol_data.group in {"module", "doc"}: + description = get_module_description(symbol_heading) + + elif symbol_data.group in _NO_SIGNATURE_GROUPS: + description = _get_symbol_description(symbol_heading) + + else: + signature = _get_signatures(symbol_heading) + description = _get_symbol_description(symbol_heading) + + return _parse_into_markdown(signature, description, symbol_data.url) -- cgit v1.2.3 From 6f4731714aa9df086ec287f768556a4c4443b635 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 02:50:49 +0200 Subject: Change DocCog to use the new parsing module fully. The parsing module provides an interface for fetching the markdown from the symbol data provided to it. Because it's now fully done in an another module we can remove the needless parts from the cog. 
--- bot/cogs/doc/cog.py | 69 ++++++----------------------------------------------- 1 file changed, 7 insertions(+), 62 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index a7dcd9020..6cd066f1b 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -3,17 +3,13 @@ import functools import logging import re import sys -import textwrap from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Dict, NamedTuple, Optional, Tuple -from urllib.parse import urljoin +from typing import Dict, NamedTuple, Optional import discord -from bs4.element import PageElement from discord.ext import commands -from markdownify import MarkdownConverter from requests import ConnectTimeout, ConnectionError, HTTPError from sphinx.ext import intersphinx from urllib3.exceptions import ProtocolError @@ -25,7 +21,7 @@ from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion from .cache import async_cache -from .parsing import get_soup_from_url, parse_module_symbol, parse_symbol, truncate_markdown +from .parsing import get_symbol_markdown log = logging.getLogger(__name__) logging.getLogger('urllib3').setLevel(logging.WARNING) @@ -187,40 +183,6 @@ class DocCog(commands.Cog): ] await asyncio.gather(*coros) - async def get_symbol_html(self, symbol: str) -> Optional[Tuple[list, str]]: - """ - Given a Python symbol, return its signature and description. - - The first tuple element is the signature of the given symbol as a markup-free string, and - the second tuple element is the description of the given symbol with HTML markup included. - - If the given symbol is a module, returns a tuple `(None, str)` - else if the symbol could not be found, returns `None`. 
- """ - symbol_info = self.doc_symbols.get(symbol) - if symbol_info is None: - return None - request_url, symbol_id = symbol_info.url.rsplit('#') - - soup = await get_soup_from_url(self.bot.http_session, request_url) - symbol_heading = soup.find(id=symbol_id) - search_html = str(soup) - - if symbol_heading is None: - return None - - if symbol_info.group == "module": - parsed_module = parse_module_symbol(symbol_heading) - if parsed_module is None: - return [], "" - else: - signatures, description = parsed_module - - else: - signatures, description = parse_symbol(symbol_heading, search_html) - - return signatures, description.replace('¶', '') - @async_cache(arg_offset=1) async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: """ @@ -228,32 +190,15 @@ class DocCog(commands.Cog): If the symbol is known, an Embed with documentation about it is returned. """ - scraped_html = await self.get_symbol_html(symbol) - if scraped_html is None: + symbol_info = self.doc_symbols.get(symbol) + if symbol_info is None: return None - - symbol_obj = self.doc_symbols[symbol] - self.bot.stats.incr(f"doc_fetches.{symbol_obj.package.lower()}") - signatures = scraped_html[0] - permalink = symbol_obj.url - description = truncate_markdown(markdownify(scraped_html[1], url=permalink), 1000) - description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) - if signatures is None: - # If symbol is a module, don't show signature. - embed_description = description - - elif not signatures: - # It's some "meta-page", for example: - # https://docs.djangoproject.com/en/dev/ref/views/#module-django.views - embed_description = "This appears to be a generic page not tied to a specific symbol." 
- - else: - embed_description = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) - embed_description += f"\n{description}" + self.bot.stats.incr(f"doc_fetches.{symbol_info.package.lower()}") + embed_description = await get_symbol_markdown(self.bot.http_session, symbol_info) embed = discord.Embed( title=discord.utils.escape_markdown(symbol), - url=permalink, + url=symbol_info.url, description=embed_description ) # Show all symbols with the same name that were renamed in the footer. -- cgit v1.2.3 From e875142a0f937ab190208523ef17068e5988dca3 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 14:25:47 +0200 Subject: Remove caching from get_symbol_embed. The web request is already cached, and parsing doesn't much more time, but without moving the logic around the cache prevents the stat increase when a symbol is requested. --- bot/cogs/doc/cog.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 6cd066f1b..05cedcaaf 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -183,7 +183,6 @@ class DocCog(commands.Cog): ] await asyncio.gather(*coros) - @async_cache(arg_offset=1) async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: """ Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. -- cgit v1.2.3 From 6731de62e3a3f5d188e73538a718d2b30cc2f442 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 14:28:07 +0200 Subject: Hold url parts in DocItem separately. This allows us to save up some memory by not creating unique strings with the base url repeated between them. 
--- bot/cogs/doc/cog.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 05cedcaaf..bd27dde01 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -55,10 +55,16 @@ NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay class DocItem(NamedTuple): """Holds inventory symbol information.""" + base_url: str + relative_url: str package: str - url: str group: str + @property + def url(self) -> str: + """Return the absolute url to the symbol.""" + return self.base_url + self.relative_url + class InventoryURL(commands.Converter): """ @@ -131,7 +137,6 @@ class DocCog(commands.Cog): for symbol, (_package_name, _version, relative_doc_url, _) in value.items(): if "/" in symbol: continue # skip unreachable symbols with slashes - absolute_doc_url = base_url + relative_doc_url # Intern the group names since they're reused in all the DocItems # to remove unnecessary memory consumption from them being unique objects group_name = sys.intern(group.split(":")[1]) @@ -158,7 +163,7 @@ class DocCog(commands.Cog): symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) - self.doc_symbols[symbol] = DocItem(api_package_name, absolute_doc_url, group_name) + self.doc_symbols[symbol] = DocItem(base_url, relative_doc_url, api_package_name, group_name) log.trace(f"Fetched inventory for {api_package_name}.") -- cgit v1.2.3 From 6ca72a68a75a1e5f56cb6a6ebec5a5b533c77eff Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 14:52:04 +0200 Subject: Remove paragraph chars from descriptions --- bot/cogs/doc/parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index a2c6564b3..79f3bbf69 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -262,4 +262,4 @@ async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem signature = 
_get_signatures(symbol_heading) description = _get_symbol_description(symbol_heading) - return _parse_into_markdown(signature, description, symbol_data.url) + return _parse_into_markdown(signature, description.replace('¶', ''), symbol_data.url) -- cgit v1.2.3 From 9f4d602bfa02fce088aaed28ee598c116b655683 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 22 Jul 2020 16:20:48 +0200 Subject: Change ValidPythonIdentifier tests to PackageName. --- tests/bot/test_converters.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py index ca8cb6825..a3c071168 100644 --- a/tests/bot/test_converters.py +++ b/tests/bot/test_converters.py @@ -10,9 +10,9 @@ from bot.converters import ( Duration, HushDurationConverter, ISODateTime, + PackageName, TagContentConverter, TagNameConverter, - ValidPythonIdentifier, ) @@ -78,24 +78,23 @@ class ConverterTests(unittest.TestCase): with self.assertRaises(BadArgument, msg=exception_message): asyncio.run(TagNameConverter.convert(self.context, invalid_name)) - def test_valid_python_identifier_for_valid(self): - """ValidPythonIdentifier returns valid identifiers unchanged.""" - test_values = ('foo', 'lemon') + def test_package_name_for_valid(self): + """PackageName returns valid package names unchanged.""" + test_values = ('foo', 'le_mon') for name in test_values: with self.subTest(identifier=name): - conversion = asyncio.run(ValidPythonIdentifier.convert(self.context, name)) + conversion = asyncio.run(PackageName.convert(self.context, name)) self.assertEqual(name, conversion) - def test_valid_python_identifier_for_invalid(self): - """ValidPythonIdentifier raises the proper exception for invalid identifiers.""" - test_values = ('nested.stuff', '#####') + def test_package_name_for_invalid(self): + """PackageName raises the proper exception for invalid package names.""" + test_values = ('text_with_a_dot.', 
'UpperCaseName', "num83r") for name in test_values: with self.subTest(identifier=name): - exception_message = f'`{name}` is not a valid Python identifier' - with self.assertRaises(BadArgument, msg=exception_message): - asyncio.run(ValidPythonIdentifier.convert(self.context, name)) + with self.assertRaises(BadArgument): + asyncio.run(PackageName.convert(self.context, name)) def test_duration_converter_for_valid(self): """Duration returns the correct `datetime` for valid duration strings.""" -- cgit v1.2.3 From 7e367ce4a5df3fbd768c6dce1acc39e786a376ea Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 25 Jul 2020 03:13:20 +0200 Subject: Ensure all renamed symbols are kept After the restructure behaviour change in d790c404ca3dba3843f351d6f42e766956aa73a1, the add to renamed_symbols was not readded and symbols that only passed the first check were being missed. --- bot/cogs/doc/cog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index bd27dde01..e52ee95c1 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -148,6 +148,7 @@ class DocCog(commands.Cog): or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) ): symbol = f"{group_name}.{symbol}" + self.renamed_symbols.add(symbol) elif (overridden_symbol_group := self.doc_symbols[symbol].group) in NO_OVERRIDE_GROUPS: overridden_symbol = f"{overridden_symbol_group}.{symbol}" @@ -158,7 +159,7 @@ class DocCog(commands.Cog): self.renamed_symbols.add(overridden_symbol) # If renamed `symbol` already exists, add library name in front to differentiate between them. - if symbol in self.renamed_symbols: + elif symbol in self.renamed_symbols: # Split `package_name` because of packages like Pillow that have spaces in them. 
symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) -- cgit v1.2.3 From 2cc7ec9e26b013b2967841372898f1f8954d8f8f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 26 Jul 2020 15:06:35 +0200 Subject: Parse NavigableStrings in symbol descriptions. When a symbol, such as [term.numpy](https://matplotlib.org/3.1.1/glossary/index.html#term-numpy) had NavigableStrings as direct children, they were not included as bs4's SoupStrainer won't include both strings and tags in its filters. The implementation goes around the limitation by introducing a new optional flag, bypassing the default check which skips matching tags when the `text` argument is present. --- bot/cogs/doc/html.py | 33 +++++++++++++++++++++++++++++++++ bot/cogs/doc/parsing.py | 36 ++++++++++++++++++++++-------------- 2 files changed, 55 insertions(+), 14 deletions(-) create mode 100644 bot/cogs/doc/html.py diff --git a/bot/cogs/doc/html.py b/bot/cogs/doc/html.py new file mode 100644 index 000000000..bc705130d --- /dev/null +++ b/bot/cogs/doc/html.py @@ -0,0 +1,33 @@ +from collections.abc import Iterable +from typing import List, Union + +from bs4.element import NavigableString, PageElement, SoupStrainer, Tag + + +class Strainer(SoupStrainer): + """Subclass of SoupStrainer to allow matching of both `Tag`s and `NavigableString`s.""" + + def __init__(self, *, include_strings: bool, **kwargs): + self.include_strings = include_strings + super().__init__(**kwargs) + + markup_hint = Union[PageElement, List["markup_hint"]] + + def search(self, markup: markup_hint) -> Union[PageElement, str]: + """Extend default SoupStrainer behaviour to allow matching both `Tag`s` and `NavigableString`s.""" + if isinstance(markup, Iterable) and not isinstance(markup, (Tag, str)): + for element in markup: + if isinstance(element, NavigableString) and self.search(element): + return element + elif isinstance(markup, Tag): + # Also include tags while we're searching for 
strings and tags. + if self.include_strings or (not self.text or self.name or self.attrs): + return self.search_tag(markup) + + elif isinstance(markup, str): + # Let everything through the text filter if we're including strings and tags. + text_filter = None if not self.include_strings else True + if not self.name and not self.attrs and self._matches(markup, text_filter): + return markup + else: + raise Exception(f"I don't know how to match against a {markup.__class__}") diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 79f3bbf69..050c49447 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -8,10 +8,11 @@ from urllib.parse import urljoin from aiohttp import ClientSession from bs4 import BeautifulSoup -from bs4.element import PageElement, Tag +from bs4.element import NavigableString, PageElement, Tag from markdownify import MarkdownConverter from .cache import async_cache +from .html import Strainer if TYPE_CHECKING: from .cog import DocItem @@ -96,25 +97,30 @@ def _find_elements_until_tag( tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], *, func: Callable, + include_strings: bool = False, limit: int = None, -) -> List[Tag]: +) -> List[Union[Tag, NavigableString]]: """ - Get all tags until a tag matching `tag_filter` is found. + Get all elements up to `limit` or until a tag matching `tag_filter` is found. `tag_filter` can be either a tuple of string names to check against, - or a filtering t.Callable that's applied to the tags. + or a filtering callable that's applied to tags. + + When `include_strings` is True, `NavigableString`s from the document will be included in the result along `Tag`s. `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`. - That method is then iterated over and all tags until the matching tag are added to the return list as strings. 
+ The method is then iterated over and all elements until the matching tag or the limit are added to the return list. """ + use_tuple_filter = isinstance(tag_filter, tuple) elements = [] - for element in func(start_element, limit=limit): - if isinstance(tag_filter, tuple): - if element.name in tag_filter: + for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit): + if isinstance(element, Tag): + if use_tuple_filter: + if element.name in tag_filter: + break + elif tag_filter(element): break - elif tag_filter(element): - break elements.append(element) return elements @@ -125,7 +131,7 @@ _find_next_siblings_until_tag = partial(_find_elements_until_tag, func=Beautiful _find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) -def get_module_description(start_element: PageElement) -> Optional[str]: +def _get_module_description(start_element: PageElement) -> Optional[str]: """ Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. 
@@ -134,7 +140,9 @@ def get_module_description(start_element: PageElement) -> Optional[str]: """ header = start_element.find("a", attrs={"class": "headerlink"}) start_tag = header.parent if header is not None else start_element - description = "".join(str(tag) for tag in _find_next_siblings_until_tag(start_tag, _match_end_tag)) + description = "".join( + str(tag) for tag in _find_next_siblings_until_tag(start_tag, _match_end_tag, include_strings=True) + ) return description @@ -142,7 +150,7 @@ def get_module_description(start_element: PageElement) -> Optional[str]: def _get_symbol_description(symbol: PageElement) -> str: """Get the string contents of the next dd tag, up to a dt or a dl tag.""" description_tag = symbol.find_next("dd") - description_contents = _find_next_children_until_tag(description_tag, ("dt", "dl")) + description_contents = _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True) return "".join(str(tag) for tag in description_contents) @@ -253,7 +261,7 @@ async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem # or don't contain any useful info to be parsed. signature = None if symbol_data.group in {"module", "doc"}: - description = get_module_description(symbol_heading) + description = _get_module_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: description = _get_symbol_description(symbol_heading) -- cgit v1.2.3 From 6ea6f732e719f93f88588f1d6c435262261e2650 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 26 Jul 2020 15:09:53 +0200 Subject: Fix markdownify's handling of h tags. Discord only allows `**` for bolding while the markdown from the default MarkdownConverter tries to use # time n with h*n* tags for different font weights. 
--- bot/cogs/doc/parsing.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 050c49447..ac8a94e3f 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -65,6 +65,10 @@ class _DocMarkdownConverter(MarkdownConverter): bullet = bullets[depth % len(bullets)] return '%s %s\n' % (bullet, text or '') + def convert_hn(self, _n: int, el: PageElement, text: str) -> str: + """Convert h tags to bold text with ** instead of adding #.""" + return f"**{text}**\n\n" + def convert_code(self, el: PageElement, text: str) -> str: """Undo `markdownify`s underscore escaping.""" return f"`{text}`".replace('\\', '') -- cgit v1.2.3 From 13030b8c54dd2ed37047349c5b09e4ded2c83391 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 26 Jul 2020 15:11:45 +0200 Subject: Move MarkdownConverter subclass to separate module --- bot/cogs/doc/markdown.py | 58 +++++++++++++++++++++++++++++++++++++++++++++++ bot/cogs/doc/parsing.py | 59 ++---------------------------------------------- 2 files changed, 60 insertions(+), 57 deletions(-) create mode 100644 bot/cogs/doc/markdown.py diff --git a/bot/cogs/doc/markdown.py b/bot/cogs/doc/markdown.py new file mode 100644 index 000000000..dca477d35 --- /dev/null +++ b/bot/cogs/doc/markdown.py @@ -0,0 +1,58 @@ +from urllib.parse import urljoin + +from bs4.element import PageElement +from markdownify import MarkdownConverter + + +class _DocMarkdownConverter(MarkdownConverter): + """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" + + def __init__(self, *, page_url: str, **options): + super().__init__(**options) + self.page_url = page_url + + def convert_li(self, el: PageElement, text: str) -> str: + """Fix markdownify's erroneous indexing in ol tags.""" + parent = el.parent + if parent is not None and parent.name == 'ol': + li_tags = parent.find_all("li") + bullet = '%s.' 
% (li_tags.index(el)+1) + else: + depth = -1 + while el: + if el.name == 'ul': + depth += 1 + el = el.parent + bullets = self.options['bullets'] + bullet = bullets[depth % len(bullets)] + return '%s %s\n' % (bullet, text or '') + + def convert_hn(self, _n: int, el: PageElement, text: str) -> str: + """Convert h tags to bold text with ** instead of adding #.""" + return f"**{text}**\n\n" + + def convert_code(self, el: PageElement, text: str) -> str: + """Undo `markdownify`s underscore escaping.""" + return f"`{text}`".replace('\\', '') + + def convert_pre(self, el: PageElement, text: str) -> str: + """Wrap any codeblocks in `py` for syntax highlighting.""" + code = ''.join(el.strings) + return f"```py\n{code}```" + + def convert_a(self, el: PageElement, text: str) -> str: + """Resolve relative URLs to `self.page_url`.""" + el["href"] = urljoin(self.page_url, el["href"]) + return super().convert_a(el, text) + + def convert_p(self, el: PageElement, text: str) -> str: + """Include only one newline instead of two when the parent is a li tag.""" + parent = el.parent + if parent is not None and parent.name == "li": + return f"{text}\n" + return super().convert_p(el, text) + + +def markdownify(html: str, *, url: str = "") -> str: + """Create a DocMarkdownConverter object from the input html.""" + return _DocMarkdownConverter(bullets='•', page_url=url).convert(html) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index ac8a94e3f..93daf3faf 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -4,15 +4,14 @@ import string import textwrap from functools import partial from typing import Callable, List, Optional, TYPE_CHECKING, Tuple, Union -from urllib.parse import urljoin from aiohttp import ClientSession from bs4 import BeautifulSoup from bs4.element import NavigableString, PageElement, Tag -from markdownify import MarkdownConverter from .cache import async_cache from .html import Strainer +from .markdown import markdownify if TYPE_CHECKING: 
from .cog import DocItem @@ -42,60 +41,6 @@ _NO_SIGNATURE_GROUPS = { } -class _DocMarkdownConverter(MarkdownConverter): - """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" - - def __init__(self, *, page_url: str, **options): - super().__init__(**options) - self.page_url = page_url - - def convert_li(self, el: PageElement, text: str) -> str: - """Fix markdownify's erroneous indexing in ol tags.""" - parent = el.parent - if parent is not None and parent.name == 'ol': - li_tags = parent.find_all("li") - bullet = '%s.' % (li_tags.index(el)+1) - else: - depth = -1 - while el: - if el.name == 'ul': - depth += 1 - el = el.parent - bullets = self.options['bullets'] - bullet = bullets[depth % len(bullets)] - return '%s %s\n' % (bullet, text or '') - - def convert_hn(self, _n: int, el: PageElement, text: str) -> str: - """Convert h tags to bold text with ** instead of adding #.""" - return f"**{text}**\n\n" - - def convert_code(self, el: PageElement, text: str) -> str: - """Undo `markdownify`s underscore escaping.""" - return f"`{text}`".replace('\\', '') - - def convert_pre(self, el: PageElement, text: str) -> str: - """Wrap any codeblocks in `py` for syntax highlighting.""" - code = ''.join(el.strings) - return f"```py\n{code}```" - - def convert_a(self, el: PageElement, text: str) -> str: - """Resolve relative URLs to `self.page_url`.""" - el["href"] = urljoin(self.page_url, el["href"]) - return super().convert_a(el, text) - - def convert_p(self, el: PageElement, text: str) -> str: - """Include only one newline instead of two when the parent is a li tag.""" - parent = el.parent - if parent is not None and parent.name == "li": - return f"{text}\n" - return super().convert_p(el, text) - - -def _markdownify(html: str, *, url: str = "") -> str: - """Create a DocMarkdownConverter object from the input html.""" - return _DocMarkdownConverter(bullets='•', page_url=url).convert(html) - - def _find_elements_until_tag( start_element: PageElement, 
tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], @@ -215,7 +160,7 @@ def _parse_into_markdown(signatures: Optional[List[str]], description: str, url: The signatures are wrapped in python codeblocks, separated from the description by a newline. The result string is truncated to be max 1000 symbols long. """ - description = _truncate_markdown(_markdownify(description, url=url), 1000) + description = _truncate_markdown(markdownify(description, url=url), 1000) description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is not None: formatted_markdown = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) -- cgit v1.2.3 From 994b828254cc8e40a52cf604910d5aa3eba2293d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 26 Jul 2020 15:21:40 +0200 Subject: Add more logging --- bot/cogs/doc/parsing.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 93daf3faf..2ea21ed98 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -197,6 +197,7 @@ async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem A request through `http_session` is made to the url associated with `symbol_data` for the html contents; the contents are then parsed depending on what group the symbol belongs to. """ + log.trace(f"Parsing symbol from url {symbol_data.url}.") if "#" in symbol_data.url: request_url, symbol_id = symbol_data.url.rsplit('#') else: @@ -210,12 +211,15 @@ async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem # or don't contain any useful info to be parsed. 
signature = None if symbol_data.group in {"module", "doc"}: + log.trace("Symbol is a module or doc, parsing as module.") description = _get_module_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: + log.trace("Symbol's group is in the group signature blacklist, skipping parsing of signature.") description = _get_symbol_description(symbol_heading) else: + log.trace("Parsing both signature and description of symbol.") signature = _get_signatures(symbol_heading) description = _get_symbol_description(symbol_heading) -- cgit v1.2.3 From 83989d28fb83801acdea4b6f51cf48e974e21891 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 26 Jul 2020 15:29:09 +0200 Subject: Rename description functions to be more general --- bot/cogs/doc/parsing.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 2ea21ed98..96bb1dfb4 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -80,14 +80,14 @@ _find_next_siblings_until_tag = partial(_find_elements_until_tag, func=Beautiful _find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) -def _get_module_description(start_element: PageElement) -> Optional[str]: +def _get_general_description(start_element: PageElement) -> Optional[str]: """ Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. - A headerlink a tag is attempted to be found to skip repeating the module name in the description, - if it's found it's used as the tag to search from instead of the `start_element`. + A headerlink a tag is attempted to be found to skip repeating the symbol information in the description, + if it's found it's used as the tag to start the search from instead of the `start_element`. 
""" - header = start_element.find("a", attrs={"class": "headerlink"}) + header = start_element.find_next("a", attrs={"class": "headerlink"}) start_tag = header.parent if header is not None else start_element description = "".join( str(tag) for tag in _find_next_siblings_until_tag(start_tag, _match_end_tag, include_strings=True) @@ -96,7 +96,7 @@ def _get_module_description(start_element: PageElement) -> Optional[str]: return description -def _get_symbol_description(symbol: PageElement) -> str: +def _get_dd_description(symbol: PageElement) -> str: """Get the string contents of the next dd tag, up to a dt or a dl tag.""" description_tag = symbol.find_next("dd") description_contents = _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True) @@ -212,15 +212,15 @@ async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem signature = None if symbol_data.group in {"module", "doc"}: log.trace("Symbol is a module or doc, parsing as module.") - description = _get_module_description(symbol_heading) + description = _get_general_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: log.trace("Symbol's group is in the group signature blacklist, skipping parsing of signature.") - description = _get_symbol_description(symbol_heading) + description = _get_dd_description(symbol_heading) else: log.trace("Parsing both signature and description of symbol.") signature = _get_signatures(symbol_heading) - description = _get_symbol_description(symbol_heading) + description = _get_dd_description(symbol_heading) return _parse_into_markdown(signature, description.replace('¶', ''), symbol_data.url) -- cgit v1.2.3 From 5290fcf0fff23e4979746c51b77be9a51fe82ae7 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 26 Jul 2020 15:51:34 +0200 Subject: Properly parse labels add fallback for non dt tags Labels point to tags that aren't in description lists, like modules or doc symbols 
which we already handle. If by chance we get a symbol that we don't have in the group for general parsing and which isn't a dt tag, log it and don't attempt to parse signature and use general description parsing instead of parsing a dd tag. --- bot/cogs/doc/parsing.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 96bb1dfb4..1271953d4 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -206,12 +206,20 @@ async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem soup = await _get_soup_from_url(http_session, request_url) symbol_heading = soup.find(id=symbol_id) - - # Handle doc symbols as modules, because they either link to the page of a module, - # or don't contain any useful info to be parsed. signature = None - if symbol_data.group in {"module", "doc"}: - log.trace("Symbol is a module or doc, parsing as module.") + # Modules, doc pages and labels don't point to description list tags but to tags like divs, + # no special parsing can be done so we only try to include what's under them. + if symbol_data.group in {"module", "doc", "label"}: + log.trace("Symbol is a module, doc or a label; using general description parsing.") + description = _get_general_description(symbol_heading) + + elif symbol_heading.name != "dt": + # Use the general parsing for symbols that aren't modules, docs or labels and aren't dt tags, + # log info the tag can be looked at. + log.info( + f"Symbol heading at url {symbol_data.url} was not a dt tag or from known groups that lack it," + f"handling as general description." 
+ ) description = _get_general_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: -- cgit v1.2.3 From b759a940a097effd16b761e0c62231ae0ca9562b Mon Sep 17 00:00:00 2001 From: dolphingarlic Date: Thu, 30 Jul 2020 20:13:15 +0200 Subject: Cleaned the code for CodeSnippets --- bot/__main__.py | 2 +- bot/cogs/code_snippets.py | 216 +++++++++++++++++++++++++++++++++++++++++++++ bot/cogs/print_snippets.py | 190 --------------------------------------- 3 files changed, 217 insertions(+), 191 deletions(-) create mode 100644 bot/cogs/code_snippets.py delete mode 100644 bot/cogs/print_snippets.py diff --git a/bot/__main__.py b/bot/__main__.py index 3191faf85..3d414c4b8 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -71,7 +71,7 @@ bot.load_extension("bot.cogs.utils") bot.load_extension("bot.cogs.watchchannels") bot.load_extension("bot.cogs.webhook_remover") bot.load_extension("bot.cogs.wolfram") -bot.load_extension("bot.cogs.print_snippets") +bot.load_extension("bot.cogs.code_snippets") if constants.HelpChannels.enable: bot.load_extension("bot.cogs.help_channels") diff --git a/bot/cogs/code_snippets.py b/bot/cogs/code_snippets.py new file mode 100644 index 000000000..9bd06f6ff --- /dev/null +++ b/bot/cogs/code_snippets.py @@ -0,0 +1,216 @@ +import re +import textwrap +from urllib.parse import quote_plus + +from aiohttp import ClientSession +from discord import Message +from discord.ext.commands import Cog + +from bot.bot import Bot +from bot.utils.messages import wait_for_deletion + + +async def fetch_http(session: ClientSession, url: str, response_format: str, **kwargs) -> str: + """Uses aiohttp to make http GET requests.""" + async with session.get(url, **kwargs) as response: + if response_format == 'text': + return await response.text() + elif response_format == 'json': + return await response.json() + + +async def fetch_github_snippet(session: ClientSession, repo: str, + path: str, start_line: str, end_line: str) -> str: + """Fetches a snippet 
from a GitHub repo.""" + headers = {'Accept': 'application/vnd.github.v3.raw'} + + # Search the GitHub API for the specified branch + refs = (await fetch_http(session, f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) + + await fetch_http(session, f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers)) + + ref = path.split('/')[0] + file_path = '/'.join(path.split('/')[1:]) + for possible_ref in refs: + if path.startswith(possible_ref['name'] + '/'): + ref = possible_ref['name'] + file_path = path[len(ref) + 1:] + break + + file_contents = await fetch_http( + session, + f'https://api.github.com/repos/{repo}/contents/{file_path}?ref={ref}', + 'text', + headers=headers, + ) + + return await snippet_to_md(file_contents, file_path, start_line, end_line) + + +async def fetch_github_gist_snippet(session: ClientSession, gist_id: str, revision: str, + file_path: str, start_line: str, end_line: str) -> str: + """Fetches a snippet from a GitHub gist.""" + headers = {'Accept': 'application/vnd.github.v3.raw'} + + gist_json = await fetch_http( + session, + f'https://api.github.com/gists/{gist_id}{f"/{revision}" if len(revision) > 0 else ""}', + 'json', + headers=headers, + ) + + # Check each file in the gist for the specified file + for gist_file in gist_json['files']: + if file_path == gist_file.lower().replace('.', '-'): + file_contents = await fetch_http( + session, + gist_json['files'][gist_file]['raw_url'], + 'text', + ) + + return await snippet_to_md(file_contents, gist_file, start_line, end_line) + + return '' + + +async def fetch_gitlab_snippet(session: ClientSession, repo: str, + path: str, start_line: str, end_line: str) -> str: + """Fetches a snippet from a GitLab repo.""" + enc_repo = quote_plus(repo) + + # Searches the GitLab API for the specified branch + refs = (await fetch_http(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/branches', 'json') + + await fetch_http(session, 
f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/tags', 'json')) + + ref = path.split('/')[0] + file_path = '/'.join(path.split('/')[1:]) + for possible_ref in refs: + if path.startswith(possible_ref['name'] + '/'): + ref = possible_ref['name'] + file_path = path[len(ref) + 1:] + break + + enc_ref = quote_plus(ref) + enc_file_path = quote_plus(file_path) + + file_contents = await fetch_http( + session, + f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/files/{enc_file_path}/raw?ref={enc_ref}', + 'text', + ) + + return await snippet_to_md(file_contents, file_path, start_line, end_line) + + +async def fetch_bitbucket_snippet(session: ClientSession, repo: str, ref: str, + file_path: str, start_line: int, end_line: int) -> str: + """Fetches a snippet from a BitBucket repo.""" + file_contents = await fetch_http( + session, + f'https://bitbucket.org/{quote_plus(repo)}/raw/{quote_plus(ref)}/{quote_plus(file_path)}', + 'text', + ) + + return await snippet_to_md(file_contents, file_path, start_line, end_line) + + +async def snippet_to_md(file_contents: str, file_path: str, start_line: str, end_line: str) -> str: + """Given file contents, file path, start line and end line creates a code block.""" + # Parse start_line and end_line into integers + if end_line is None: + start_line = end_line = int(start_line) + else: + start_line = int(start_line) + end_line = int(end_line) + + split_file_contents = file_contents.splitlines() + + # Make sure that the specified lines are in range + if start_line > end_line: + start_line, end_line = end_line, start_line + if start_line > len(split_file_contents) or end_line < 1: + return '' + start_line = max(1, start_line) + end_line = min(len(split_file_contents), end_line) + + # Gets the code lines, dedents them, and inserts zero-width spaces to prevent Markdown injection + required = '\n'.join(split_file_contents[start_line - 1:end_line]) + required = textwrap.dedent(required).rstrip().replace('`', '`\u200b') + + # 
Extracts the code language and checks whether it's a "valid" language + language = file_path.split('/')[-1].split('.')[-1] + if not language.replace('-', '').replace('+', '').replace('_', '').isalnum(): + language = '' + + if len(required) != 0: + return f'```{language}\n{required}```\n' + return '' + + +GITHUB_RE = re.compile( + r'https://github\.com/(?P.+?)/blob/(?P.+/.+)' + r'#L(?P\d+)([-~]L(?P\d+))?\b' +) + +GITHUB_GIST_RE = re.compile( + r'https://gist\.github\.com/([^/]+)/(?P[^\W_]+)/*' + r'(?P[^\W_]*)/*#file-(?P.+?)' + r'-L(?P\d+)([-~]L(?P\d+))?\b' +) + +GITLAB_RE = re.compile( + r'https://gitlab\.com/(?P.+?)/\-/blob/(?P.+/.+)' + r'#L(?P\d+)([-](?P\d+))?\b' +) + +BITBUCKET_RE = re.compile( + r'https://bitbucket\.org/(?P.+?)/src/(?P.+?)/' + r'(?P.+?)#lines-(?P\d+)(:(?P\d+))?\b' +) + + +class CodeSnippets(Cog): + """ + Cog that prints out snippets to Discord. + + Matches each message against a regex and prints the contents of all matched snippets. + """ + + def __init__(self, bot: Bot): + """Initializes the cog's bot.""" + self.bot = bot + + @Cog.listener() + async def on_message(self, message: Message) -> None: + """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" + gh_match = GITHUB_RE.search(message.content) + gh_gist_match = GITHUB_GIST_RE.search(message.content) + gl_match = GITLAB_RE.search(message.content) + bb_match = BITBUCKET_RE.search(message.content) + + if (gh_match or gh_gist_match or gl_match or bb_match) and not message.author.bot: + message_to_send = '' + + for gh in GITHUB_RE.finditer(message.content): + message_to_send += await fetch_github_snippet(self.bot.http_session, **gh.groupdict()) + + for gh_gist in GITHUB_GIST_RE.finditer(message.content): + message_to_send += await fetch_github_gist_snippet(self.bot.http_session, **gh_gist.groupdict()) + + for gl in GITLAB_RE.finditer(message.content): + message_to_send += await fetch_gitlab_snippet(self.bot.http_session, **gl.groupdict()) + + for bb 
in BITBUCKET_RE.finditer(message.content): + message_to_send += await fetch_bitbucket_snippet(self.bot.http_session, **bb.groupdict()) + + if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15: + await message.edit(suppress=True) + await wait_for_deletion( + await message.channel.send(message_to_send), + (message.author.id,), + client=self.bot + ) + + +def setup(bot: Bot) -> None: + """Load the CodeSnippets cog.""" + bot.add_cog(CodeSnippets(bot)) diff --git a/bot/cogs/print_snippets.py b/bot/cogs/print_snippets.py deleted file mode 100644 index 3f784d2c6..000000000 --- a/bot/cogs/print_snippets.py +++ /dev/null @@ -1,190 +0,0 @@ -import asyncio -import os -import re -import textwrap - -import aiohttp -from discord import Message, Reaction, User -from discord.ext.commands import Cog - -from bot.bot import Bot - - -async def fetch_http(session: aiohttp.ClientSession, url: str, response_format: str, **kwargs) -> str: - """Uses aiohttp to make http GET requests.""" - async with session.get(url, **kwargs) as response: - if response_format == 'text': - return await response.text() - elif response_format == 'json': - return await response.json() - - -async def revert_to_orig(d: dict) -> dict: - """Replace URL Encoded values back to their original.""" - for obj in d: - if d[obj] is not None: - d[obj] = d[obj].replace('%2F', '/').replace('%2E', '.') - - -async def orig_to_encode(d: dict) -> dict: - """Encode URL Parameters.""" - for obj in d: - if d[obj] is not None: - d[obj] = d[obj].replace('/', '%2F').replace('.', '%2E') - - -async def snippet_to_embed(d: dict, file_contents: str) -> str: - """Given a regex groupdict and file contents, creates a code block.""" - if d['end_line']: - start_line = int(d['start_line']) - end_line = int(d['end_line']) - else: - start_line = end_line = int(d['start_line']) - - split_file_contents = file_contents.split('\n') - - if start_line > end_line: - start_line, end_line = end_line, start_line - if start_line > 
len(split_file_contents) or end_line < 1: - return '' - start_line = max(1, start_line) - end_line = min(len(split_file_contents), end_line) - - required = '\n'.join(split_file_contents[start_line - 1:end_line]) - required = textwrap.dedent(required).rstrip().replace('`', '`\u200b') - - language = d['file_path'].split('/')[-1].split('.')[-1] - if not language.replace('-', '').replace('+', '').replace('_', '').isalnum(): - language = '' - - if len(required) != 0: - return f'```{language}\n{required}```\n' - return '``` ```\n' - - -GITHUB_RE = re.compile( - r'https://github\.com/(?P.+?)/blob/(?P.+?)/' - + r'(?P.+?)#L(?P\d+)([-~]L(?P\d+))?\b' -) - -GITHUB_GIST_RE = re.compile( - r'https://gist\.github\.com/([^/]*)/(?P[0-9a-zA-Z]+)/*' - + r'(?P[0-9a-zA-Z]*)/*#file-(?P.+?)' - + r'-L(?P\d+)([-~]L(?P\d+))?\b' -) - -GITLAB_RE = re.compile( - r'https://gitlab\.com/(?P.+?)/\-/blob/(?P.+?)/' - + r'(?P.+?)#L(?P\d+)([-~](?P\d+))?\b' -) - -BITBUCKET_RE = re.compile( - r'https://bitbucket\.org/(?P.+?)/src/(?P.+?)/' - + r'(?P.+?)#lines-(?P\d+)(:(?P\d+))?\b' -) - - -class PrintSnippets(Cog): - """ - Cog that prints out snippets to Discord. - - Matches each message against a regex and prints the contents of all matched snippets. 
- """ - - def __init__(self, bot: Bot): - """Initializes the cog's bot.""" - self.bot = bot - self.session = aiohttp.ClientSession() - - @Cog.listener() - async def on_message(self, message: Message) -> None: - """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" - gh_match = GITHUB_RE.search(message.content) - gh_gist_match = GITHUB_GIST_RE.search(message.content) - gl_match = GITLAB_RE.search(message.content) - bb_match = BITBUCKET_RE.search(message.content) - - if (gh_match or gh_gist_match or gl_match or bb_match) and not message.author.bot: - message_to_send = '' - - for gh in GITHUB_RE.finditer(message.content): - d = gh.groupdict() - headers = {'Accept': 'application/vnd.github.v3.raw'} - if 'GITHUB_TOKEN' in os.environ: - headers['Authorization'] = f'token {os.environ["GITHUB_TOKEN"]}' - file_contents = await fetch_http( - self.session, - f'https://api.github.com/repos/{d["repo"]}' - + f'/contents/{d["file_path"]}?ref={d["branch"]}', - 'text', - headers=headers, - ) - message_to_send += await snippet_to_embed(d, file_contents) - - for gh_gist in GITHUB_GIST_RE.finditer(message.content): - d = gh_gist.groupdict() - gist_json = await fetch_http( - self.session, - f'https://api.github.com/gists/{d["gist_id"]}' - + f'{"/" + d["revision"] if len(d["revision"]) > 0 else ""}', - 'json', - ) - for f in gist_json['files']: - if d['file_path'] == f.lower().replace('.', '-'): - d['file_path'] = f - file_contents = await fetch_http( - self.session, - gist_json['files'][f]['raw_url'], - 'text', - ) - message_to_send += await snippet_to_embed(d, file_contents) - break - - for gl in GITLAB_RE.finditer(message.content): - d = gl.groupdict() - await orig_to_encode(d) - headers = {} - if 'GITLAB_TOKEN' in os.environ: - headers['PRIVATE-TOKEN'] = os.environ["GITLAB_TOKEN"] - file_contents = await fetch_http( - self.session, - f'https://gitlab.com/api/v4/projects/{d["repo"]}/' - + 
f'repository/files/{d["file_path"]}/raw?ref={d["branch"]}', - 'text', - headers=headers, - ) - await revert_to_orig(d) - message_to_send += await snippet_to_embed(d, file_contents) - - for bb in BITBUCKET_RE.finditer(message.content): - d = bb.groupdict() - await orig_to_encode(d) - file_contents = await fetch_http( - self.session, - f'https://bitbucket.org/{d["repo"]}/raw/{d["branch"]}/{d["file_path"]}', - 'text', - ) - await revert_to_orig(d) - message_to_send += await snippet_to_embed(d, file_contents) - - message_to_send = message_to_send[:-1] - - if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 50: - sent_message = await message.channel.send(message_to_send) - await message.edit(suppress=True) - await sent_message.add_reaction('❌') - - def check(reaction: Reaction, user: User) -> bool: - return user == message.author and str(reaction.emoji) == '❌' - - try: - reaction, user = await self.bot.wait_for('reaction_add', timeout=10.0, check=check) - except asyncio.TimeoutError: - await sent_message.remove_reaction('❌', self.bot.user) - else: - await sent_message.delete() - - -def setup(bot: Bot) -> None: - """Load the Utils cog.""" - bot.add_cog(PrintSnippets(bot)) -- cgit v1.2.3 From ddb3c230cc7e1b38dbb57be10b1684c4ecb2ac7b Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 16 Sep 2020 00:14:58 +0200 Subject: Remove old comment --- bot/cogs/doc/cog.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index e52ee95c1..2f4c99252 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -160,7 +160,6 @@ class DocCog(commands.Cog): # If renamed `symbol` already exists, add library name in front to differentiate between them. elif symbol in self.renamed_symbols: - # Split `package_name` because of packages like Pillow that have spaces in them. 
symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) -- cgit v1.2.3 From cb89cbaa36102c111c0204eb7c8bc27cecc1d4cd Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 16 Sep 2020 00:18:51 +0200 Subject: Don't return fragment in DocItem url The fragment is only needed for the user and required sparingly returning only the url while keeping the fragment behind symbol_id simplifies the uses of the url without it. --- bot/cogs/doc/cog.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 2f4c99252..2e49fcd38 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -55,15 +55,16 @@ NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay class DocItem(NamedTuple): """Holds inventory symbol information.""" - base_url: str - relative_url: str package: str group: str + base_url: str + relative_url_path: str + symbol_id: str @property def url(self) -> str: """Return the absolute url to the symbol.""" - return self.base_url + self.relative_url + return "".join((self.base_url, self.relative_url_path)) class InventoryURL(commands.Converter): @@ -141,21 +142,20 @@ class DocCog(commands.Cog): # to remove unnecessary memory consumption from them being unique objects group_name = sys.intern(group.split(":")[1]) - if symbol in self.doc_symbols: - symbol_base_url = self.doc_symbols[symbol].url.split("/", 3)[2] + if (original_symbol := self.doc_symbols.get(symbol)) is not None: if ( group_name in NO_OVERRIDE_GROUPS - or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) + or any(package == original_symbol.package for package in NO_OVERRIDE_PACKAGES) ): symbol = f"{group_name}.{symbol}" self.renamed_symbols.add(symbol) - elif (overridden_symbol_group := self.doc_symbols[symbol].group) in NO_OVERRIDE_GROUPS: + elif (overridden_symbol_group := original_symbol.group) in NO_OVERRIDE_GROUPS: overridden_symbol = 
f"{overridden_symbol_group}.{symbol}" if overridden_symbol in self.renamed_symbols: overridden_symbol = f"{api_package_name}.{overridden_symbol}" - self.doc_symbols[overridden_symbol] = self.doc_symbols[symbol] + self.doc_symbols[overridden_symbol] = original_symbol self.renamed_symbols.add(overridden_symbol) # If renamed `symbol` already exists, add library name in front to differentiate between them. @@ -202,7 +202,7 @@ class DocCog(commands.Cog): embed = discord.Embed( title=discord.utils.escape_markdown(symbol), - url=symbol_info.url, + url=f"{symbol_info.url}#{symbol_info.symbol_id}", description=embed_description ) # Show all symbols with the same name that were renamed in the footer. -- cgit v1.2.3 From 75f95a110ce96734cb64f89321f9a6eeb0d79463 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 20 Sep 2020 03:06:59 +0200 Subject: Replace caching of soups with new class. Storing BeautifulSoup objects could lead to memory problems because of their large footprint, the new class replaces the long term storage by parsing all items on the first fetch of the page and only storing their markdown string. 
--- bot/cogs/doc/cog.py | 122 +++++++++++++++++++++++++++++++++++++++++++++--- bot/cogs/doc/parsing.py | 36 ++------------ 2 files changed, 119 insertions(+), 39 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 2e49fcd38..d57e76ebd 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -1,14 +1,18 @@ +from __future__ import annotations + import asyncio import functools import logging import re import sys -from collections import OrderedDict +from collections import defaultdict from contextlib import suppress from types import SimpleNamespace -from typing import Dict, NamedTuple, Optional +from typing import Dict, List, NamedTuple, Optional, Union import discord +from aiohttp import ClientSession +from bs4 import BeautifulSoup from discord.ext import commands from requests import ConnectTimeout, ConnectionError, HTTPError from sphinx.ext import intersphinx @@ -20,7 +24,6 @@ from bot.converters import PackageName, ValidURL from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion -from .cache import async_cache from .parsing import get_symbol_markdown log = logging.getLogger(__name__) @@ -67,6 +70,108 @@ class DocItem(NamedTuple): return "".join((self.base_url, self.relative_url_path)) +class QueueItem(NamedTuple): + """Contains a symbol and the BeautifulSoup object needed to parse it.""" + + symbol: DocItem + soup: BeautifulSoup + + def __eq__(self, other: Union[QueueItem, DocItem]): + if isinstance(other, DocItem): + return self.symbol == other + return NamedTuple.__eq__(self, other) + + +class CachedParser: + """ + Get symbol markdown from pages with smarter caching. + + DocItems are added through the `add_item` method which adds them to the `_page_symbols` dict. + `get_markdown` is used to fetch the markdown; when this is used for the first time on a page, + all of the symbols are queued to be parsed to avoid multiple web requests to the same page. 
+ """ + + def __init__(self): + self._queue: List[QueueItem] = [] + self._results = {} + self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) + self._item_events: Dict[DocItem, asyncio.Event] = {} + self._parse_task = None + + async def get_markdown(self, client_session: ClientSession, doc_item: DocItem) -> str: + """ + Get result markdown of `doc_item`. + + If no symbols were fetched from `doc_item`s page before, + the HTML has to be fetched before parsing can be queued. + """ + if (symbol := self._results.get(doc_item)) is not None: + return symbol + + if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: + async with client_session.get(doc_item.url) as response: + soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") + + self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) + del self._page_symbols[doc_item.url] + log.debug(f"Added symbols from {doc_item.url} to parse queue.") + + if self._parse_task is None: + self._parse_task = asyncio.create_task(self._parse_queue()) + + self._move_to_front(doc_item) + self._item_events[doc_item] = item_event = asyncio.Event() + await item_event.wait() + return self._results[doc_item] + + async def _parse_queue(self) -> None: + """ + Parse all item from the queue, setting associated events for symbols if present. + + The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished. 
+ """ + log.trace("Starting queue parsing.") + while self._queue: + item, soup = self._queue.pop() + self._results[item] = get_symbol_markdown(soup, item) + if (event := self._item_events.get(item)) is not None: + event.set() + await asyncio.sleep(0.1) + + self._parse_task = None + log.trace("Finished parsing queue.") + + def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: + """Move `item` to the front of the parse queue.""" + # The parse queue stores soups along with the doc symbols in QueueItem objects, + # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. + item_index = self._queue.index(item) + queue_item = self._queue[item_index] + + del self._queue[item_index] + self._queue.append(queue_item) + + def add_item(self, doc_item: DocItem) -> None: + """Add a DocItem to `_page_symbols`.""" + self._page_symbols[doc_item.url].append(doc_item) + + async def clear(self) -> None: + """ + Clear all internal symbol data. + + All currently requested items are waited to be parsed before clearing. + """ + for event in self._item_events.values(): + await event.wait() + if self._parse_task is not None: + self._parse_task.cancel() + self._parse_task = None + self._queue.clear() + self._results.clear() + self._page_symbols.clear() + self._item_events.clear() + + class InventoryURL(commands.Converter): """ Represents an Intersphinx inventory URL. 
@@ -106,6 +211,7 @@ class DocCog(commands.Cog): self.base_urls = {} self.bot = bot self.doc_symbols: Dict[str, DocItem] = {} + self.item_fetcher = CachedParser() self.renamed_symbols = set() self.bot.loop.create_task(self.init_refresh_inventory()) @@ -163,7 +269,10 @@ class DocCog(commands.Cog): symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) - self.doc_symbols[symbol] = DocItem(base_url, relative_doc_url, api_package_name, group_name) + relative_url_path, _, symbol_id = relative_doc_url.partition("#") + symbol_item = DocItem(api_package_name, group_name, base_url, relative_url_path, symbol_id) + self.doc_symbols[symbol] = symbol_item + self.item_fetcher.add_item(symbol_item) log.trace(f"Fetched inventory for {api_package_name}.") @@ -177,7 +286,7 @@ class DocCog(commands.Cog): self.base_urls.clear() self.doc_symbols.clear() self.renamed_symbols.clear() - async_cache.cache = OrderedDict() + await self.item_fetcher.clear() # Run all coroutines concurrently - since each of them performs a HTTP # request, this speeds up fetching the inventory data heavily. @@ -198,12 +307,11 @@ class DocCog(commands.Cog): if symbol_info is None: return None self.bot.stats.incr(f"doc_fetches.{symbol_info.package.lower()}") - embed_description = await get_symbol_markdown(self.bot.http_session, symbol_info) embed = discord.Embed( title=discord.utils.escape_markdown(symbol), url=f"{symbol_info.url}#{symbol_info.symbol_id}", - description=embed_description + description=await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info) ) # Show all symbols with the same name that were renamed in the footer. 
embed.set_footer( diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 1271953d4..9fbce7bed 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -5,11 +5,9 @@ import textwrap from functools import partial from typing import Callable, List, Optional, TYPE_CHECKING, Tuple, Union -from aiohttp import ClientSession from bs4 import BeautifulSoup from bs4.element import NavigableString, PageElement, Tag -from .cache import async_cache from .html import Strainer from .markdown import markdownify if TYPE_CHECKING: @@ -171,16 +169,6 @@ def _parse_into_markdown(signatures: Optional[List[str]], description: str, url: return formatted_markdown -@async_cache(arg_offset=1) -async def _get_soup_from_url(http_session: ClientSession, url: str) -> BeautifulSoup: - """Create a BeautifulSoup object from the HTML data in `url` with the head tag removed.""" - log.trace(f"Sending a request to {url}.") - async with http_session.get(url) as response: - soup = BeautifulSoup(await response.text(encoding="utf8"), 'lxml') - soup.find("head").decompose() # the head contains no useful data so we can remove it - return soup - - def _match_end_tag(tag: Tag) -> bool: """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" for attr in _SEARCH_END_TAG_ATTRS: @@ -190,44 +178,28 @@ def _match_end_tag(tag: Tag) -> bool: return tag.name == "table" -async def get_symbol_markdown(http_session: ClientSession, symbol_data: "DocItem") -> str: +def get_symbol_markdown(soup: BeautifulSoup, symbol_data: "DocItem") -> str: """ - Return parsed markdown of the passed symbol, truncated to 1000 characters. + Return parsed markdown of the passed symbol using the passed in soup, truncated to 1000 characters. - A request through `http_session` is made to the url associated with `symbol_data` for the html contents; - the contents are then parsed depending on what group the symbol belongs to. 
+ The method of parsing and what information gets included depends on the symbol's group. """ - log.trace(f"Parsing symbol from url {symbol_data.url}.") - if "#" in symbol_data.url: - request_url, symbol_id = symbol_data.url.rsplit('#') - else: - request_url = symbol_data.url - symbol_id = None - - soup = await _get_soup_from_url(http_session, request_url) - symbol_heading = soup.find(id=symbol_id) + symbol_heading = soup.find(id=symbol_data.symbol_id) signature = None # Modules, doc pages and labels don't point to description list tags but to tags like divs, # no special parsing can be done so we only try to include what's under them. if symbol_data.group in {"module", "doc", "label"}: - log.trace("Symbol is a module, doc or a label; using general description parsing.") description = _get_general_description(symbol_heading) elif symbol_heading.name != "dt": # Use the general parsing for symbols that aren't modules, docs or labels and aren't dt tags, # log info the tag can be looked at. - log.info( - f"Symbol heading at url {symbol_data.url} was not a dt tag or from known groups that lack it," - f"handling as general description." 
- ) description = _get_general_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: - log.trace("Symbol's group is in the group signature blacklist, skipping parsing of signature.") description = _get_dd_description(symbol_heading) else: - log.trace("Parsing both signature and description of symbol.") signature = _get_signatures(symbol_heading) description = _get_dd_description(symbol_heading) -- cgit v1.2.3 From 38753114c0d056ba330296c9fea7a8f2312459f9 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 20 Sep 2020 03:08:36 +0200 Subject: Replace forward ref with future annotations import --- bot/cogs/doc/parsing.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 9fbce7bed..21a3065f4 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import re import string @@ -178,7 +180,7 @@ def _match_end_tag(tag: Tag) -> bool: return tag.name == "table" -def get_symbol_markdown(soup: BeautifulSoup, symbol_data: "DocItem") -> str: +def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: """ Return parsed markdown of the passed symbol using the passed in soup, truncated to 1000 characters. 
-- cgit v1.2.3 From de440ce8c4539972ea0f0538042e6cb41a4395dc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 20 Sep 2020 03:09:24 +0200 Subject: Remove unused cache --- bot/cogs/doc/cache.py | 32 -------------------------------- 1 file changed, 32 deletions(-) delete mode 100644 bot/cogs/doc/cache.py diff --git a/bot/cogs/doc/cache.py b/bot/cogs/doc/cache.py deleted file mode 100644 index 9da2a1dab..000000000 --- a/bot/cogs/doc/cache.py +++ /dev/null @@ -1,32 +0,0 @@ -import functools -from collections import OrderedDict -from typing import Any, Callable - - -def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: - """ - LRU cache implementation for coroutines. - - Once the cache exceeds the maximum size, keys are deleted in FIFO order. - - An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key. - """ - # Assign the cache to the function itself so we can clear it from outside. 
- async_cache.cache = OrderedDict() - - def decorator(function: Callable) -> Callable: - """Define the async_cache decorator.""" - @functools.wraps(function) - async def wrapper(*args) -> Any: - """Decorator wrapper for the caching logic.""" - key = ':'.join(args[arg_offset:]) - - value = async_cache.cache.get(key) - if value is None: - if len(async_cache.cache) > max_size: - async_cache.cache.popitem(last=False) - - async_cache.cache[key] = await function(*args) - return async_cache.cache[key] - return wrapper - return decorator -- cgit v1.2.3 From 758dd3ef6ca5c1cd7615f0eb6688d7d2f19578ea Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 20 Sep 2020 23:46:54 +0200 Subject: Log exceptions from parsing task --- bot/cogs/doc/cog.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index fc01dfb20..7c1bf2a5f 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -133,9 +133,13 @@ class CachedParser: log.trace("Starting queue parsing.") while self._queue: item, soup = self._queue.pop() - self._results[item] = get_symbol_markdown(soup, item) - if (event := self._item_events.get(item)) is not None: - event.set() + try: + self._results[item] = get_symbol_markdown(soup, item) + except Exception: + log.exception(f"Unexpected error when handling {item}") + else: + if (event := self._item_events.get(item)) is not None: + event.set() await asyncio.sleep(0.1) self._parse_task = None -- cgit v1.2.3 From 7ab949e09a22d7547f74caa447d81299f7b52e47 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 21 Sep 2020 00:30:08 +0200 Subject: Properly truncate description markdown The previous truncating implementation used a naive method that disregarded the actual markdown formatting, possibly resulting in it getting cut out. 
With the introduction of proper href tags this became impossible to manage without writing an actual parser; so the process was moved to happen when the gathered bs4 elements are being converted into markdown --- bot/cogs/doc/markdown.py | 7 +--- bot/cogs/doc/parsing.py | 86 +++++++++++++++++++++++++++--------------------- 2 files changed, 49 insertions(+), 44 deletions(-) diff --git a/bot/cogs/doc/markdown.py b/bot/cogs/doc/markdown.py index dca477d35..a95e94991 100644 --- a/bot/cogs/doc/markdown.py +++ b/bot/cogs/doc/markdown.py @@ -4,7 +4,7 @@ from bs4.element import PageElement from markdownify import MarkdownConverter -class _DocMarkdownConverter(MarkdownConverter): +class DocMarkdownConverter(MarkdownConverter): """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" def __init__(self, *, page_url: str, **options): @@ -51,8 +51,3 @@ class _DocMarkdownConverter(MarkdownConverter): if parent is not None and parent.name == "li": return f"{text}\n" return super().convert_p(el, text) - - -def markdownify(html: str, *, url: str = "") -> str: - """Create a DocMarkdownConverter object from the input html.""" - return _DocMarkdownConverter(bullets='•', page_url=url).convert(html) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 21a3065f4..ed6343cd8 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -5,13 +5,13 @@ import re import string import textwrap from functools import partial -from typing import Callable, List, Optional, TYPE_CHECKING, Tuple, Union +from typing import Callable, Iterable, List, Optional, TYPE_CHECKING, Tuple, Union from bs4 import BeautifulSoup from bs4.element import NavigableString, PageElement, Tag from .html import Strainer -from .markdown import markdownify +from .markdown import DocMarkdownConverter if TYPE_CHECKING: from .cog import DocItem @@ -39,6 +39,8 @@ _NO_SIGNATURE_GROUPS = { "templatetag", "term", } +_MAX_DESCRIPTION_LENGTH = 1800 +_TRUNCATE_STRIP_CHARACTERS = 
"!?:;." + string.whitespace def _find_elements_until_tag( @@ -80,7 +82,7 @@ _find_next_siblings_until_tag = partial(_find_elements_until_tag, func=Beautiful _find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) -def _get_general_description(start_element: PageElement) -> Optional[str]: +def _get_general_description(start_element: PageElement) -> Iterable[Union[Tag, NavigableString]]: """ Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. @@ -89,18 +91,13 @@ def _get_general_description(start_element: PageElement) -> Optional[str]: """ header = start_element.find_next("a", attrs={"class": "headerlink"}) start_tag = header.parent if header is not None else start_element - description = "".join( - str(tag) for tag in _find_next_siblings_until_tag(start_tag, _match_end_tag, include_strings=True) - ) + return _find_next_siblings_until_tag(start_tag, _match_end_tag, include_strings=True) - return description - -def _get_dd_description(symbol: PageElement) -> str: - """Get the string contents of the next dd tag, up to a dt or a dl tag.""" +def _get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString]]: + """Get the contents of the next dd tag, up to a dt or a dl tag.""" description_tag = symbol.find_next("dd") - description_contents = _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True) - return "".join(str(tag) for tag in description_contents) + return _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True) def _get_signatures(start_signature: PageElement) -> List[str]: @@ -124,43 +121,57 @@ def _get_signatures(start_signature: PageElement) -> List[str]: return signatures -def _truncate_markdown(markdown: str, max_length: int) -> str: +def _get_truncated_description( + elements: Iterable[Union[Tag, NavigableString]], + markdown_converter: DocMarkdownConverter, + max_length: int, +) -> str: """ - Truncate 
`markdown` to be at most `max_length` characters. + Truncate markdown from `elements` to be at most `max_length` characters visually. - The markdown string is searched for substrings to cut at, to keep its structure, - but if none are found the string is simply sliced. + `max_length` limits the length of the rendered characters in the string, + with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits """ - if len(markdown) > max_length: - shortened = markdown[:max_length] - description_cutoff = shortened.rfind('\n\n', 100) - if description_cutoff == -1: - # Search the shortened version for cutoff points in decreasing desirability, - # cutoff at 1000 if none are found. - for cutoff_string in (". ", ", ", ",", " "): - description_cutoff = shortened.rfind(cutoff_string) - if description_cutoff != -1: - break + visual_length = 0 + real_length = 0 + result = [] + shortened = False + + for element in elements: + is_tag = isinstance(element, Tag) + element_length = len(element.text) if is_tag else len(element) + if visual_length + element_length < max_length: + if is_tag: + element_markdown = markdown_converter.process_tag(element) + else: + element_markdown = markdown_converter.process_text(element) + + element_markdown_length = len(element_markdown) + if real_length + element_markdown_length < _MAX_DESCRIPTION_LENGTH: + result.append(element_markdown) else: - description_cutoff = max_length - markdown = markdown[:description_cutoff] + shortened = True + break + real_length += element_markdown_length + visual_length += element_length + else: + shortened = True + break - # If there is an incomplete code block, cut it out - if markdown.count("```") % 2: - codeblock_start = markdown.rfind('```py') - markdown = markdown[:codeblock_start].rstrip() - markdown = markdown.rstrip(string.punctuation) + "..." 
- return markdown + markdown_string = "".join(result) + if shortened: + markdown_string = markdown_string.rstrip(_TRUNCATE_STRIP_CHARACTERS) + "..." + return markdown_string -def _parse_into_markdown(signatures: Optional[List[str]], description: str, url: str) -> str: +def _parse_into_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str: """ Create a markdown string with the signatures at the top, and the converted html description below them. The signatures are wrapped in python codeblocks, separated from the description by a newline. The result string is truncated to be max 1000 symbols long. """ - description = _truncate_markdown(markdownify(description, url=url), 1000) + description = _get_truncated_description(description, DocMarkdownConverter(bullets="•", page_url=url), 750) description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is not None: formatted_markdown = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) @@ -204,5 +215,4 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: else: signature = _get_signatures(symbol_heading) description = _get_dd_description(symbol_heading) - - return _parse_into_markdown(signature, description.replace('¶', ''), symbol_data.url) + return _parse_into_markdown(signature, description, symbol_data.url).replace('¶', '') -- cgit v1.2.3 From 3eed4af70fa24e5daef6c5e6d2d145094b9e672f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 21 Sep 2020 00:39:15 +0200 Subject: Use f strings instead of c style on copied code The code copied over from MarkdownConverter's implementation used c style string formatting, there is no reason to keep the style of strings in our code --- bot/cogs/doc/markdown.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/bot/cogs/doc/markdown.py b/bot/cogs/doc/markdown.py index a95e94991..ba35a84c4 100644 --- 
a/bot/cogs/doc/markdown.py +++ b/bot/cogs/doc/markdown.py @@ -14,18 +14,18 @@ class DocMarkdownConverter(MarkdownConverter): def convert_li(self, el: PageElement, text: str) -> str: """Fix markdownify's erroneous indexing in ol tags.""" parent = el.parent - if parent is not None and parent.name == 'ol': + if parent is not None and parent.name == "ol": li_tags = parent.find_all("li") - bullet = '%s.' % (li_tags.index(el)+1) + bullet = f"{li_tags.index(el)+1}." else: depth = -1 while el: - if el.name == 'ul': + if el.name == "ul": depth += 1 el = el.parent - bullets = self.options['bullets'] + bullets = self.options["bullets"] bullet = bullets[depth % len(bullets)] - return '%s %s\n' % (bullet, text or '') + return f"{bullet} {text}\n" def convert_hn(self, _n: int, el: PageElement, text: str) -> str: """Convert h tags to bold text with ** instead of adding #.""" @@ -33,11 +33,11 @@ class DocMarkdownConverter(MarkdownConverter): def convert_code(self, el: PageElement, text: str) -> str: """Undo `markdownify`s underscore escaping.""" - return f"`{text}`".replace('\\', '') + return f"`{text}`".replace("\\", "") def convert_pre(self, el: PageElement, text: str) -> str: """Wrap any codeblocks in `py` for syntax highlighting.""" - code = ''.join(el.strings) + code = "".join(el.strings) return f"```py\n{code}```" def convert_a(self, el: PageElement, text: str) -> str: -- cgit v1.2.3 From b6ef6b6bc30b02e0a6797dd9feae167da2cb6e5b Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 21 Sep 2020 00:52:40 +0200 Subject: Handle cases with outdated bot inventories. 
--- bot/cogs/doc/parsing.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index ed6343cd8..939f963f1 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -198,6 +198,9 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: The method of parsing and what information gets included depends on the symbol's group. """ symbol_heading = soup.find(id=symbol_data.symbol_id) + if symbol_heading is None: + log.warning("Symbol present in loaded inventories not found on site, consider refreshing inventories.") + return "Unable to parse the requested symbol." signature = None # Modules, doc pages and labels don't point to description list tags but to tags like divs, # no special parsing can be done so we only try to include what's under them. -- cgit v1.2.3 From ba73313adaff363bef9e3a505bf66373ea915997 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 21 Sep 2020 22:36:18 +0200 Subject: Use List typehint that has a narrower scope --- bot/cogs/doc/parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 939f963f1..9c82a1c13 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -82,7 +82,7 @@ _find_next_siblings_until_tag = partial(_find_elements_until_tag, func=Beautiful _find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) -def _get_general_description(start_element: PageElement) -> Iterable[Union[Tag, NavigableString]]: +def _get_general_description(start_element: PageElement) -> List[Union[Tag, NavigableString]]: """ Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. 
-- cgit v1.2.3 From 730f30197c43cc170aaecde664712f6f4aaea246 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 26 Sep 2020 17:49:43 +0200 Subject: Collapse signatures between args instead of spaces The signature length needed more logic and shorter limits to ensure messages would fit in a discord message in a nice way. --- bot/cogs/doc/parsing.py | 95 +++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 92 insertions(+), 3 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 9c82a1c13..7dddadf43 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -5,7 +5,7 @@ import re import string import textwrap from functools import partial -from typing import Callable, Iterable, List, Optional, TYPE_CHECKING, Tuple, Union +from typing import Callable, Collection, Iterable, List, Optional, TYPE_CHECKING, Tuple, Union from bs4 import BeautifulSoup from bs4.element import NavigableString, PageElement, Tag @@ -19,6 +19,7 @@ log = logging.getLogger(__name__) _UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") _WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") +_PARAMETERS_RE = re.compile(r"\((.+)\)") _SEARCH_END_TAG_ATTRS = ( "data", @@ -39,8 +40,59 @@ _NO_SIGNATURE_GROUPS = { "templatetag", "term", } -_MAX_DESCRIPTION_LENGTH = 1800 +_EMBED_CODE_BLOCK_LENGTH = 61 +# Three code block wrapped lines with py syntax highlight +_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LENGTH + 8) * 3 +# Maximum discord message length - signatures on top +_MAX_DESCRIPTION_LENGTH = 2000 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace +_BRACKET_PAIRS = { + "{": "}", + "(": ")", + "[": "]", +} + + +def _split_parameters(parameters_string: str) -> List[str]: + """ + Split parameters of a signature into individual parameter strings on commas. + + Long string literals are not accounted for. 
+ """ + parameters_list = [] + last_split = 0 + depth = 0 + expected_end = None + current_search = None + previous_character = "" + + for index, character in enumerate(parameters_string): + if character in _BRACKET_PAIRS: + if current_search is None: + current_search = character + expected_end = _BRACKET_PAIRS[character] + if character == current_search: + depth += 1 + + elif character in {"'", '"'}: + if depth == 0: + depth += 1 + elif not previous_character == "\\": + depth -= 1 + + elif character == expected_end: + depth -= 1 + if depth == 0: + current_search = None + expected_end = None + + elif depth == 0 and character == ",": + parameters_list.append(parameters_string[last_split:index]) + last_split = index + 1 + previous_character = character + + parameters_list.append(parameters_string[last_split:]) + return parameters_list def _find_elements_until_tag( @@ -121,6 +173,43 @@ def _get_signatures(start_signature: PageElement) -> List[str]: return signatures +def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collection[str]]: + """ + Truncate passed signatures to not exceed `_MAX_SIGNAUTRES_LENGTH`. + + If the signatures need to be truncated, parameters are collapsed until they fit withing the limit. + Individual signatures can consist of max 1, 2 or 3 lines of text, inversely proportional to the amount of them. + A maximum of 3 signatures is assumed to be passed. 
+ """ + if not sum(len(signature) for signature in signatures) > _MAX_SIGNATURES_LENGTH: + return signatures + + max_signature_length = _EMBED_CODE_BLOCK_LENGTH * (4 - len(signatures)) + formatted_signatures = [] + for signature in signatures: + signature = signature.strip() + if len(signature) > max_signature_length: + if (parameters_match := _PARAMETERS_RE.search(signature)) is None: + formatted_signatures.append(textwrap.shorten(signature, max_signature_length)) + continue + + truncated_signature = [] + parameters_string = parameters_match[1] + running_length = len(signature) - len(parameters_string) + for parameter in _split_parameters(parameters_string): + if (len(parameter) + running_length) <= max_signature_length - 4: # account for comma and placeholder + truncated_signature.append(parameter) + running_length += len(parameter) + 1 + else: + truncated_signature.append(" ...") + formatted_signatures.append(signature.replace(parameters_string, ",".join(truncated_signature))) + break + else: + formatted_signatures.append(signature) + + return formatted_signatures + + def _get_truncated_description( elements: Iterable[Union[Tag, NavigableString]], markdown_converter: DocMarkdownConverter, @@ -174,7 +263,7 @@ def _parse_into_markdown(signatures: Optional[List[str]], description: Iterable[ description = _get_truncated_description(description, DocMarkdownConverter(bullets="•", page_url=url), 750) description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is not None: - formatted_markdown = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) + formatted_markdown = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures)) else: formatted_markdown = "" formatted_markdown += f"\n{description}" -- cgit v1.2.3 From e10f91fce08f26f92776c3641ddd26f961a0c8b8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 26 Sep 2020 17:51:52 +0200 Subject: Make 
amount of included signatures configurable --- bot/cogs/doc/parsing.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index 7dddadf43..cf1124936 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -17,6 +17,8 @@ if TYPE_CHECKING: log = logging.getLogger(__name__) +_MAX_SIGNATURE_AMOUNT = 3 + _UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") _WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") _PARAMETERS_RE = re.compile(r"\((.+)\)") @@ -41,8 +43,8 @@ _NO_SIGNATURE_GROUPS = { "term", } _EMBED_CODE_BLOCK_LENGTH = 61 -# Three code block wrapped lines with py syntax highlight -_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LENGTH + 8) * 3 +# _MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight +_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LENGTH + 8) * _MAX_SIGNATURE_AMOUNT # Maximum discord message length - signatures on top _MAX_DESCRIPTION_LENGTH = 2000 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace @@ -154,7 +156,7 @@ def _get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString] def _get_signatures(start_signature: PageElement) -> List[str]: """ - Collect up to 3 signatures from dt tags around the `start_signature` dt tag. + Collect up to `_MAX_SIGNATURE_AMOUNT` signatures from dt tags around the `start_signature` dt tag. First the signatures under the `start_signature` are included; if less than 2 are found, tags above the start signature are added to the result if any are present. 
@@ -164,7 +166,7 @@ def _get_signatures(start_signature: PageElement) -> List[str]: *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)), start_signature, *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2), - )[-3:]: + )[-_MAX_SIGNATURE_AMOUNT:]: signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) if signature: @@ -178,13 +180,14 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec Truncate passed signatures to not exceed `_MAX_SIGNAUTRES_LENGTH`. If the signatures need to be truncated, parameters are collapsed until they fit withing the limit. - Individual signatures can consist of max 1, 2 or 3 lines of text, inversely proportional to the amount of them. - A maximum of 3 signatures is assumed to be passed. + Individual signatures can consist of max 1, 2, ..., `_MAX_SIGNATURE_AMOUNT` lines of text, + inversely proportional to the amount of signatures. + A maximum of `_MAX_SIGNATURE_AMOUNT` signatures is assumed to be passed. 
""" if not sum(len(signature) for signature in signatures) > _MAX_SIGNATURES_LENGTH: return signatures - max_signature_length = _EMBED_CODE_BLOCK_LENGTH * (4 - len(signatures)) + max_signature_length = _EMBED_CODE_BLOCK_LENGTH * (_MAX_SIGNATURE_AMOUNT + 1 - len(signatures)) formatted_signatures = [] for signature in signatures: signature = signature.strip() -- cgit v1.2.3 From a2e7db718fbeb6fabb5e261ef4414038477abfb2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 28 Sep 2020 23:43:58 +0200 Subject: Add parentheses for clarity --- bot/cogs/doc/parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/doc/parsing.py b/bot/cogs/doc/parsing.py index cf1124936..7cf4ec7ba 100644 --- a/bot/cogs/doc/parsing.py +++ b/bot/cogs/doc/parsing.py @@ -166,7 +166,7 @@ def _get_signatures(start_signature: PageElement) -> List[str]: *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)), start_signature, *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2), - )[-_MAX_SIGNATURE_AMOUNT:]: + )[-(_MAX_SIGNATURE_AMOUNT):]: signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) if signature: -- cgit v1.2.3 From 2b97cfad08f7dac0ea1ce6119bab004b4c2452e7 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 29 Sep 2020 23:03:36 +0200 Subject: Add async implementation of sphinx fetch_inventory The sphinx version of the function does a lot of checks that are unnecessary for the bot because it's not working with anything else related to docs. The custom implementation means we can throw some of the code out and get rid of sphinx as a dependency. 
--- LICENSE-THIRD-PARTY | 30 ++++++++++++++ bot/cogs/doc/inventory_parser.py | 87 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 LICENSE-THIRD-PARTY create mode 100644 bot/cogs/doc/inventory_parser.py diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY new file mode 100644 index 000000000..f78491fc1 --- /dev/null +++ b/LICENSE-THIRD-PARTY @@ -0,0 +1,30 @@ +License for Sphinx +Applies to: + - bot/cogs/doc/inventory_parser.py: _load_v1, _load_v2 and ZlibStreamReader.__aiter__. +================== + +Copyright (c) 2007-2020 by the Sphinx team (see AUTHORS file). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/bot/cogs/doc/inventory_parser.py b/bot/cogs/doc/inventory_parser.py new file mode 100644 index 000000000..6c2b63d5e --- /dev/null +++ b/bot/cogs/doc/inventory_parser.py @@ -0,0 +1,87 @@ +import re +import zlib +from collections import defaultdict +from typing import AsyncIterator, DefaultDict, List, Tuple + +import aiohttp + +_V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)') + + +class ZlibStreamReader: + """Class used for decoding zlib data of a stream line by line.""" + + READ_CHUNK_SIZE = 16 * 1024 + + def __init__(self, stream: aiohttp.StreamReader) -> None: + self.stream = stream + + async def _read_compressed_chunks(self) -> AsyncIterator[bytes]: + """Read zlib data in `READ_CHUNK_SIZE` sized chunks and decompress.""" + decompressor = zlib.decompressobj() + async for chunk in self.stream.iter_chunked(self.READ_CHUNK_SIZE): + yield decompressor.decompress(chunk) + + yield decompressor.flush() + + async def __aiter__(self) -> AsyncIterator[str]: + """Yield lines of decompressed text.""" + buf = b'' + async for chunk in self._read_compressed_chunks(): + buf += chunk + pos = buf.find(b'\n') + while pos != -1: + yield buf[:pos].decode() + buf = buf[pos + 1:] + pos = buf.find(b'\n') + + +async def _load_v1(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[str, str]]]: + invdata = defaultdict(list) + + async for line in stream: + name, type_, location = line.decode().rstrip().split(maxsplit=2) + # version 1 did not add anchors to the location + if type_ == 'mod': + type_ = 'py:module' + location += '#module-' + name + else: + type_ = 'py:' + type_ + location += '#' + name + invdata[type_].append((name, location)) + return invdata + + +async def _load_v2(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[str, str]]]: + invdata = defaultdict(list) + + async for line in ZlibStreamReader(stream): + m = _V2_LINE_RE.match(line.rstrip()) + name, type_, _prio, location, _dispname = m.groups() # ignore the parsed 
items we don't need + if location.endswith('$'): + location = location[:-1] + name + + invdata[type_].append((name, location)) + return invdata + + +async def fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> DefaultDict[str, List[Tuple[str, str]]]: + """Fetch, parse and return an intersphinx inventory file from an url.""" + timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5) + async with client_session.get(url, timeout=timeout, raise_for_status=True) as response: + stream = response.content + + inventory_header = (await stream.readline()).decode().rstrip() + inventory_version = int(inventory_header[-1:]) + await stream.readline() # skip project name + await stream.readline() # skip project version + + if inventory_version == 1: + return await _load_v1(stream) + + elif inventory_version == 2: + if b"zlib" not in await stream.readline(): + raise ValueError(f"Invalid inventory file at url {url}.") + return await _load_v2(stream) + + raise ValueError(f"Invalid inventory file at url {url}.") -- cgit v1.2.3 From d8c36ac9f189ba9638ef91df7628f95845161f8e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 30 Sep 2020 00:19:39 +0200 Subject: Handle errors on inventory fetching --- bot/cogs/doc/inventory_parser.py | 37 +++++++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/bot/cogs/doc/inventory_parser.py b/bot/cogs/doc/inventory_parser.py index 6c2b63d5e..23931869b 100644 --- a/bot/cogs/doc/inventory_parser.py +++ b/bot/cogs/doc/inventory_parser.py @@ -1,10 +1,14 @@ +import logging import re import zlib from collections import defaultdict -from typing import AsyncIterator, DefaultDict, List, Tuple +from typing import AsyncIterator, DefaultDict, List, Optional, Tuple import aiohttp +log = logging.getLogger(__name__) + +FAILED_REQUEST_ATTEMPTS = 3 _V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)') @@ -65,7 +69,7 @@ async def 
_load_v2(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[ return invdata -async def fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> DefaultDict[str, List[Tuple[str, str]]]: +async def _fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> DefaultDict[str, List[Tuple[str, str]]]: """Fetch, parse and return an intersphinx inventory file from an url.""" timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5) async with client_session.get(url, timeout=timeout, raise_for_status=True) as response: @@ -85,3 +89,32 @@ async def fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> De return await _load_v2(stream) raise ValueError(f"Invalid inventory file at url {url}.") + + +async def fetch_inventory( + client_session: aiohttp.ClientSession, + url: str +) -> Optional[DefaultDict[str, List[Tuple[str, str]]]]: + """Get inventory from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors.""" + for attempt in range(1, FAILED_REQUEST_ATTEMPTS+1): + try: + inventory = await _fetch_inventory(client_session, url) + except aiohttp.ClientConnectorError: + log.warning( + f"Failed to connect to inventory url at {url}, " + f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." + ) + except aiohttp.ClientError: + log.error( + f"Failed to get inventory from {url}, " + f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." + ) + except Exception: + log.exception( + f"An unexpected error has occurred during fetching of {url}, " + f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." 
+ ) + else: + return inventory + + return None -- cgit v1.2.3 From 3bf04d8a353056944ac335b1d387d71464a81aa1 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 30 Sep 2020 00:38:24 +0200 Subject: Use new async inventory fetching --- bot/cogs/doc/cog.py | 71 ++++++----------------------------------------------- 1 file changed, 7 insertions(+), 64 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 7c1bf2a5f..2cb296d53 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -1,22 +1,17 @@ from __future__ import annotations import asyncio -import functools import logging import re import sys from collections import defaultdict from contextlib import suppress -from types import SimpleNamespace from typing import Dict, List, NamedTuple, Optional, Union import discord from aiohttp import ClientSession from bs4 import BeautifulSoup from discord.ext import commands -from requests import ConnectTimeout, ConnectionError, HTTPError -from sphinx.ext import intersphinx -from urllib3.exceptions import ProtocolError from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput @@ -24,20 +19,10 @@ from bot.converters import PackageName, ValidURL from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion +from .inventory_parser import FAILED_REQUEST_ATTEMPTS, fetch_inventory from .parsing import get_symbol_markdown log = logging.getLogger(__name__) -logging.getLogger('urllib3').setLevel(logging.WARNING) - -# Since Intersphinx is intended to be used with Sphinx, -# we need to mock its configuration. 
-SPHINX_MOCK_APP = SimpleNamespace( - config=SimpleNamespace( - intersphinx_timeout=3, - tls_verify=True, - user_agent="python3:python-discord/bot:1.0.0" - ) -) NO_OVERRIDE_GROUPS = ( "2to3fixer", @@ -51,7 +36,6 @@ NO_OVERRIDE_PACKAGES = ( ) WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") -FAILED_REQUEST_RETRY_AMOUNT = 3 NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay @@ -190,21 +174,8 @@ class InventoryURL(commands.Converter): async def convert(ctx: commands.Context, url: str) -> str: """Convert url to Intersphinx inventory URL.""" await ctx.trigger_typing() - try: - intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url) - except AttributeError: - raise commands.BadArgument(f"Failed to fetch Intersphinx inventory from URL `{url}`.") - except ConnectionError: - if url.startswith('https'): - raise commands.BadArgument( - f"Cannot establish a connection to `{url}`. Does it support HTTPS?" - ) - raise commands.BadArgument(f"Cannot connect to host with URL `{url}`.") - except ValueError: - raise commands.BadArgument( - f"Failed to read Intersphinx inventory from URL `{url}`. " - "Are you sure that it's a valid inventory file?" - ) + if await fetch_inventory(ctx.bot.http_session, url) is None: + raise commands.BadArgument(f"Failed to fetch inventory file after {FAILED_REQUEST_ATTEMPTS}.") return url @@ -235,17 +206,16 @@ class DocCog(commands.Cog): * `package_name` is the package name to use, appears in the log * `base_url` is the root documentation URL for the specified package, used to build absolute paths that link to specific symbols - * `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running - `intersphinx.fetch_inventory` in an executor on the bot's event loop + * `inventory_url` is the absolute URL to the intersphinx inventory. 
""" self.base_urls[api_package_name] = base_url - package = await self._fetch_inventory(inventory_url) + package = await fetch_inventory(self.bot.http_session, inventory_url) if not package: return None - for group, value in package.items(): - for symbol, (_package_name, _version, relative_doc_url, _) in value.items(): + for group, items in package.items(): + for symbol, relative_doc_url in items: if "/" in symbol: continue # skip unreachable symbols with slashes # Intern the group names since they're reused in all the DocItems @@ -455,30 +425,3 @@ class DocCog(commands.Cog): description=f"```diff\n{added}\n{removed}```" if added or removed else "" ) await ctx.send(embed=embed) - - async def _fetch_inventory(self, inventory_url: str) -> Optional[dict]: - """Get and return inventory from `inventory_url`. If fetching fails, return None.""" - fetch_func = functools.partial(intersphinx.fetch_inventory, SPHINX_MOCK_APP, '', inventory_url) - for retry in range(1, FAILED_REQUEST_RETRY_AMOUNT+1): - try: - package = await self.bot.loop.run_in_executor(None, fetch_func) - except ConnectTimeout: - log.error( - f"Fetching of inventory {inventory_url} timed out," - f" trying again. ({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except ProtocolError: - log.error( - f"Connection lost while fetching inventory {inventory_url}," - f" trying again. 
({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except HTTPError as e: - log.error(f"Fetching of inventory {inventory_url} failed with status code {e.response.status_code}.") - return None - except ConnectionError: - log.error(f"Couldn't establish connection to inventory {inventory_url}.") - return None - else: - return package - log.error(f"Fetching of inventory {inventory_url} failed.") - return None -- cgit v1.2.3 From 46ee70533328eed3790ebb93d1257b5d4e598802 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 30 Sep 2020 00:42:55 +0200 Subject: Remove sphinx and requests from Pipfile With our own implementation of sphinx's inventory fetching we no longer need the sphinx package, and requests which were used inside of it. --- Pipfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Pipfile b/Pipfile index 6fff2223e..1e54c9212 100644 --- a/Pipfile +++ b/Pipfile @@ -21,9 +21,7 @@ markdownify = "~=0.4" more_itertools = "~=8.2" python-dateutil = "~=2.8" pyyaml = "~=5.1" -requests = "~=2.22" sentry-sdk = "~=0.14" -sphinx = "~=2.2" statsd = "~=3.3" [dev-packages] -- cgit v1.2.3 From c5aa0c0bd7e8933648fbedc92a7cd1f5ae199772 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 1 Oct 2020 00:04:53 +0200 Subject: Reschedule failed inventory updates --- bot/cogs/doc/cog.py | 39 +++++++++++++++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/bot/cogs/doc/cog.py b/bot/cogs/doc/cog.py index 2cb296d53..41fca4584 100644 --- a/bot/cogs/doc/cog.py +++ b/bot/cogs/doc/cog.py @@ -19,6 +19,7 @@ from bot.converters import PackageName, ValidURL from bot.decorators import with_role from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion +from bot.utils.scheduling import Scheduler from .inventory_parser import FAILED_REQUEST_ATTEMPTS, fetch_inventory from .parsing import get_symbol_markdown @@ -189,6 +190,9 @@ class 
DocCog(commands.Cog): self.item_fetcher = CachedParser() self.renamed_symbols = set() + self.inventory_scheduler = Scheduler(self.__class__.__name__) + self.scheduled_inventories = set() + self.bot.loop.create_task(self.init_refresh_inventory()) async def init_refresh_inventory(self) -> None: @@ -198,7 +202,7 @@ class DocCog(commands.Cog): async def update_single( self, api_package_name: str, base_url: str, inventory_url: str - ) -> None: + ) -> bool: """ Rebuild the inventory for a single package. @@ -207,12 +211,27 @@ class DocCog(commands.Cog): * `base_url` is the root documentation URL for the specified package, used to build absolute paths that link to specific symbols * `inventory_url` is the absolute URL to the intersphinx inventory. + + If the inventory file is currently unreachable, + the update is rescheduled to execute in 2 minutes on the first attempt, and 5 minutes on subsequent attempts. + + Return True on success; False if fetching failed and was rescheduled. """ self.base_urls[api_package_name] = base_url - package = await fetch_inventory(self.bot.http_session, inventory_url) + if not package: - return None + delay = 2*60 if inventory_url not in self.scheduled_inventories else 5*60 + log.info(f"Failed to fetch inventory, attempting again in {delay//60} minutes.") + self.inventory_scheduler.schedule_later( + delay, + api_package_name, + fetch_inventory(self.bot.http_session, inventory_url) + ) + self.scheduled_inventories.add(api_package_name) + return False + with suppress(KeyError): + self.scheduled_inventories.discard(api_package_name) for group, items in package.items(): for symbol, relative_doc_url in items: @@ -249,6 +268,7 @@ class DocCog(commands.Cog): self.item_fetcher.add_item(symbol_item) log.trace(f"Fetched inventory for {api_package_name}.") + return True async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" @@ -260,6 +280,7 @@ class DocCog(commands.Cog): self.base_urls.clear() 
self.doc_symbols.clear() self.renamed_symbols.clear() + self.scheduled_inventories.clear() await self.item_fetcher.clear() # Run all coroutines concurrently - since each of them performs a HTTP @@ -385,7 +406,11 @@ class DocCog(commands.Cog): f"Inventory URL: {inventory_url}" ) - await self.update_single(package_name, base_url, inventory_url) + if await self.update_single(package_name, base_url, inventory_url) is None: + await ctx.send( + f"Added package `{package_name}` to database but failed to fetch inventory; rescheduled in 2 minutes." + ) + return await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @@ -399,6 +424,9 @@ class DocCog(commands.Cog): """ await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') + if package_name in self.scheduled_inventories: + self.inventory_scheduler.cancel(package_name) + async with ctx.typing(): # Rebuild the inventory to ensure that everything # that was from this package is properly deleted. 
@@ -409,6 +437,9 @@ class DocCog(commands.Cog): @with_role(*MODERATION_ROLES) async def refresh_command(self, ctx: commands.Context) -> None: """Refresh inventories and send differences to channel.""" + for inventory in self.scheduled_inventories: + self.inventory_scheduler.cancel(inventory) + old_inventories = set(self.base_urls) with ctx.typing(): await self.refresh_inventory() -- cgit v1.2.3 From f4924f0e8c26e373ddae8cb29f1f3935aaf00f4a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 10 Oct 2020 21:47:34 +0200 Subject: Handle non dt fallback together with modules --- bot/exts/info/doc/_parsing.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 83e35e2b1..a79332716 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -296,12 +296,7 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: signature = None # Modules, doc pages and labels don't point to description list tags but to tags like divs, # no special parsing can be done so we only try to include what's under them. - if symbol_data.group in {"module", "doc", "label"}: - description = _get_general_description(symbol_heading) - - elif symbol_heading.name != "dt": - # Use the general parsing for symbols that aren't modules, docs or labels and aren't dt tags, - # log info the tag can be looked at. 
+ if symbol_data.group in {"module", "doc", "label"} or symbol_heading.name != "dt": description = _get_general_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: -- cgit v1.2.3 From 2744b10fae0f3b1d4ac198ba819c024e037e5660 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 10 Oct 2020 21:48:10 +0200 Subject: Use more descriptive name for end_tag_filter --- bot/exts/info/doc/_parsing.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index a79332716..5f6c23c8d 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -99,7 +99,7 @@ def _split_parameters(parameters_string: str) -> List[str]: def _find_elements_until_tag( start_element: PageElement, - tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], + end_tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], *, func: Callable, include_strings: bool = False, @@ -108,7 +108,7 @@ def _find_elements_until_tag( """ Get all elements up to `limit` or until a tag matching `tag_filter` is found. - `tag_filter` can be either a tuple of string names to check against, + `end_tag_filter` can be either a tuple of string names to check against, or a filtering callable that's applied to tags. When `include_strings` is True, `NavigableString`s from the document will be included in the result along `Tag`s. @@ -116,15 +116,15 @@ def _find_elements_until_tag( `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`. The method is then iterated over and all elements until the matching tag or the limit are added to the return list. 
""" - use_tuple_filter = isinstance(tag_filter, tuple) + use_tuple_filter = isinstance(end_tag_filter, tuple) elements = [] for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit): if isinstance(element, Tag): if use_tuple_filter: - if element.name in tag_filter: + if element.name in end_tag_filter: break - elif tag_filter(element): + elif end_tag_filter(element): break elements.append(element) -- cgit v1.2.3 From 9e4832965957eec291a3ccde198252ab28ce13e2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 10 Oct 2020 21:50:37 +0200 Subject: Exclude headerlinks outside of current section --- bot/exts/info/doc/_parsing.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 5f6c23c8d..d31f26060 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -132,20 +132,22 @@ def _find_elements_until_tag( _find_next_children_until_tag = partial(_find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False)) +_find_recursive_children_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_all) _find_next_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_next_siblings) _find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) -def _get_general_description(start_element: PageElement) -> List[Union[Tag, NavigableString]]: +def _get_general_description(start_element: Tag) -> List[Union[Tag, NavigableString]]: """ Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. A headerlink a tag is attempted to be found to skip repeating the symbol information in the description, if it's found it's used as the tag to start the search from instead of the `start_element`. 
""" - header = start_element.find_next("a", attrs={"class": "headerlink"}) + child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100) + header = next(filter(_class_filter_factory(["headerlink"]), child_tags), None) start_tag = header.parent if header is not None else start_element - return _find_next_siblings_until_tag(start_tag, _match_end_tag, include_strings=True) + return _find_next_siblings_until_tag(start_tag, _class_filter_factory(_SEARCH_END_TAG_ATTRS), include_strings=True) def _get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString]]: @@ -274,13 +276,15 @@ def _parse_into_markdown(signatures: Optional[List[str]], description: Iterable[ return formatted_markdown -def _match_end_tag(tag: Tag) -> bool: - """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" - for attr in _SEARCH_END_TAG_ATTRS: - if attr in tag.get("class", ()): - return True +def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]: + """Create callable that returns True when the passed in tag's class is in `class_names` or when it's is a table.""" + def match_tag(tag: Tag) -> bool: + for attr in class_names: + if attr in tag.get("class", ()): + return True + return tag.name == "table" - return tag.name == "table" + return match_tag def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: -- cgit v1.2.3 From 59f1fffb656447668f6e5a34fcc52697b152780a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 18 Oct 2020 03:04:29 +0200 Subject: Handle escaped backslashes in strings --- bot/exts/info/doc/_parsing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index d31f26060..0883b9f42 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -66,7 +66,6 @@ def _split_parameters(parameters_string: str) 
-> List[str]: depth = 0 expected_end = None current_search = None - previous_character = "" for index, character in enumerate(parameters_string): if character in _BRACKET_PAIRS: @@ -79,7 +78,9 @@ def _split_parameters(parameters_string: str) -> List[str]: elif character in {"'", '"'}: if depth == 0: depth += 1 - elif not previous_character == "\\": + elif parameters_string[index-1] != "\\": + depth -= 1 + elif parameters_string[index-2] == "\\": depth -= 1 elif character == expected_end: @@ -91,7 +92,6 @@ def _split_parameters(parameters_string: str) -> List[str]: elif depth == 0 and character == ",": parameters_list.append(parameters_string[last_split:index]) last_split = index + 1 - previous_character = character parameters_list.append(parameters_string[last_split:]) return parameters_list -- cgit v1.2.3 From c966853e92b696b9132c6f5316e6920e3cb70733 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 27 Oct 2020 10:58:49 +0200 Subject: Moved code for finding the right ref to a function --- bot/cogs/code_snippets.py | 34 ++++++++++++++-------------------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/bot/cogs/code_snippets.py b/bot/cogs/code_snippets.py index 9bd06f6ff..b10c68789 100644 --- a/bot/cogs/code_snippets.py +++ b/bot/cogs/code_snippets.py @@ -19,6 +19,18 @@ async def fetch_http(session: ClientSession, url: str, response_format: str, **k return await response.json() +def find_ref(path: str, refs: tuple) -> tuple: + """Loops through all branches and tags to find the required ref.""" + ref = path.split('/')[0] + file_path = '/'.join(path.split('/')[1:]) + for possible_ref in refs: + if path.startswith(possible_ref['name'] + '/'): + ref = possible_ref['name'] + file_path = path[len(ref) + 1:] + break + return (ref, file_path) + + async def fetch_github_snippet(session: ClientSession, repo: str, path: str, start_line: str, end_line: str) -> str: """Fetches a snippet from a GitHub repo.""" @@ -28,13 +40,7 @@ async def 
fetch_github_snippet(session: ClientSession, repo: str, refs = (await fetch_http(session, f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) + await fetch_http(session, f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers)) - ref = path.split('/')[0] - file_path = '/'.join(path.split('/')[1:]) - for possible_ref in refs: - if path.startswith(possible_ref['name'] + '/'): - ref = possible_ref['name'] - file_path = path[len(ref) + 1:] - break + ref, file_path = find_ref(path, refs) file_contents = await fetch_http( session, @@ -42,7 +48,6 @@ async def fetch_github_snippet(session: ClientSession, repo: str, 'text', headers=headers, ) - return await snippet_to_md(file_contents, file_path, start_line, end_line) @@ -66,9 +71,7 @@ async def fetch_github_gist_snippet(session: ClientSession, gist_id: str, revisi gist_json['files'][gist_file]['raw_url'], 'text', ) - return await snippet_to_md(file_contents, gist_file, start_line, end_line) - return '' @@ -81,14 +84,7 @@ async def fetch_gitlab_snippet(session: ClientSession, repo: str, refs = (await fetch_http(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/branches', 'json') + await fetch_http(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/tags', 'json')) - ref = path.split('/')[0] - file_path = '/'.join(path.split('/')[1:]) - for possible_ref in refs: - if path.startswith(possible_ref['name'] + '/'): - ref = possible_ref['name'] - file_path = path[len(ref) + 1:] - break - + ref, file_path = find_ref(path, refs) enc_ref = quote_plus(ref) enc_file_path = quote_plus(file_path) @@ -97,7 +93,6 @@ async def fetch_gitlab_snippet(session: ClientSession, repo: str, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/files/{enc_file_path}/raw?ref={enc_ref}', 'text', ) - return await snippet_to_md(file_contents, file_path, start_line, end_line) @@ -109,7 +104,6 @@ async def fetch_bitbucket_snippet(session: ClientSession, repo: str, ref: str, 
f'https://bitbucket.org/{quote_plus(repo)}/raw/{quote_plus(ref)}/{quote_plus(file_path)}', 'text', ) - return await snippet_to_md(file_contents, file_path, start_line, end_line) -- cgit v1.2.3 From 372cfb9c1dcfb761ad468ac38955473db57f18b6 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 27 Oct 2020 11:02:03 +0200 Subject: Renamed fetch_http to fetch_response --- bot/cogs/code_snippets.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/bot/cogs/code_snippets.py b/bot/cogs/code_snippets.py index b10c68789..27faf70ec 100644 --- a/bot/cogs/code_snippets.py +++ b/bot/cogs/code_snippets.py @@ -10,8 +10,8 @@ from bot.bot import Bot from bot.utils.messages import wait_for_deletion -async def fetch_http(session: ClientSession, url: str, response_format: str, **kwargs) -> str: - """Uses aiohttp to make http GET requests.""" +async def fetch_response(session: ClientSession, url: str, response_format: str, **kwargs) -> str: + """Makes http requests using aiohttp.""" async with session.get(url, **kwargs) as response: if response_format == 'text': return await response.text() @@ -37,12 +37,12 @@ async def fetch_github_snippet(session: ClientSession, repo: str, headers = {'Accept': 'application/vnd.github.v3.raw'} # Search the GitHub API for the specified branch - refs = (await fetch_http(session, f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) - + await fetch_http(session, f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers)) + refs = (await fetch_response(session, f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) + + await fetch_response(session, f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers)) ref, file_path = find_ref(path, refs) - file_contents = await fetch_http( + file_contents = await fetch_response( session, f'https://api.github.com/repos/{repo}/contents/{file_path}?ref={ref}', 'text', @@ -56,7 +56,7 @@ async def 
fetch_github_gist_snippet(session: ClientSession, gist_id: str, revisi """Fetches a snippet from a GitHub gist.""" headers = {'Accept': 'application/vnd.github.v3.raw'} - gist_json = await fetch_http( + gist_json = await fetch_response( session, f'https://api.github.com/gists/{gist_id}{f"/{revision}" if len(revision) > 0 else ""}', 'json', @@ -66,7 +66,7 @@ async def fetch_github_gist_snippet(session: ClientSession, gist_id: str, revisi # Check each file in the gist for the specified file for gist_file in gist_json['files']: if file_path == gist_file.lower().replace('.', '-'): - file_contents = await fetch_http( + file_contents = await fetch_response( session, gist_json['files'][gist_file]['raw_url'], 'text', @@ -81,14 +81,14 @@ async def fetch_gitlab_snippet(session: ClientSession, repo: str, enc_repo = quote_plus(repo) # Searches the GitLab API for the specified branch - refs = (await fetch_http(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/branches', 'json') - + await fetch_http(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/tags', 'json')) + refs = (await fetch_response(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/branches', 'json') + + await fetch_response(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/tags', 'json')) ref, file_path = find_ref(path, refs) enc_ref = quote_plus(ref) enc_file_path = quote_plus(file_path) - file_contents = await fetch_http( + file_contents = await fetch_response( session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/files/{enc_file_path}/raw?ref={enc_ref}', 'text', @@ -99,7 +99,7 @@ async def fetch_gitlab_snippet(session: ClientSession, repo: str, async def fetch_bitbucket_snippet(session: ClientSession, repo: str, ref: str, file_path: str, start_line: int, end_line: int) -> str: """Fetches a snippet from a BitBucket repo.""" - file_contents = await fetch_http( + file_contents = await fetch_response( session, 
f'https://bitbucket.org/{quote_plus(repo)}/raw/{quote_plus(ref)}/{quote_plus(file_path)}', 'text', -- cgit v1.2.3 From c3ce61937211cbd8c7e3df1c501cda70d97623cb Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 27 Oct 2020 11:16:14 +0200 Subject: Renamed snippet_to_md and wrote a better docstring --- bot/cogs/code_snippets.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/bot/cogs/code_snippets.py b/bot/cogs/code_snippets.py index 27faf70ec..dda4d185f 100644 --- a/bot/cogs/code_snippets.py +++ b/bot/cogs/code_snippets.py @@ -21,8 +21,10 @@ async def fetch_response(session: ClientSession, url: str, response_format: str, def find_ref(path: str, refs: tuple) -> tuple: """Loops through all branches and tags to find the required ref.""" + # Base case: there is no slash in the branch name ref = path.split('/')[0] file_path = '/'.join(path.split('/')[1:]) + # In case there are slashes in the branch name, we loop through all branches and tags for possible_ref in refs: if path.startswith(possible_ref['name'] + '/'): ref = possible_ref['name'] @@ -48,7 +50,7 @@ async def fetch_github_snippet(session: ClientSession, repo: str, 'text', headers=headers, ) - return await snippet_to_md(file_contents, file_path, start_line, end_line) + return snippet_to_codeblock(file_contents, file_path, start_line, end_line) async def fetch_github_gist_snippet(session: ClientSession, gist_id: str, revision: str, @@ -71,7 +73,7 @@ async def fetch_github_gist_snippet(session: ClientSession, gist_id: str, revisi gist_json['files'][gist_file]['raw_url'], 'text', ) - return await snippet_to_md(file_contents, gist_file, start_line, end_line) + return snippet_to_codeblock(file_contents, gist_file, start_line, end_line) return '' @@ -93,7 +95,7 @@ async def fetch_gitlab_snippet(session: ClientSession, repo: str, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/files/{enc_file_path}/raw?ref={enc_ref}', 'text', ) - return await 
snippet_to_md(file_contents, file_path, start_line, end_line) + return snippet_to_codeblock(file_contents, file_path, start_line, end_line) async def fetch_bitbucket_snippet(session: ClientSession, repo: str, ref: str, @@ -104,11 +106,21 @@ async def fetch_bitbucket_snippet(session: ClientSession, repo: str, ref: str, f'https://bitbucket.org/{quote_plus(repo)}/raw/{quote_plus(ref)}/{quote_plus(file_path)}', 'text', ) - return await snippet_to_md(file_contents, file_path, start_line, end_line) + return snippet_to_codeblock(file_contents, file_path, start_line, end_line) -async def snippet_to_md(file_contents: str, file_path: str, start_line: str, end_line: str) -> str: - """Given file contents, file path, start line and end line creates a code block.""" +def snippet_to_codeblock(file_contents: str, file_path: str, start_line: str, end_line: str) -> str: + """ + Given the entire file contents and target lines, creates a code block. + + First, we split the file contents into a list of lines and then keep and join only the required + ones together. + + We then dedent the lines to look nice, and replace all ` characters with `\u200b to prevent + markdown injection. + + Finally, we surround the code with ``` characters. 
+ """ # Parse start_line and end_line into integers if end_line is None: start_line = end_line = int(start_line) -- cgit v1.2.3 From 28dfd8278a8ee24fb26bc5359729ca0ed0307632 Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Tue, 27 Oct 2020 11:17:26 +0200 Subject: Update bot/cogs/code_snippets.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Leon Sandøy --- bot/cogs/code_snippets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/code_snippets.py b/bot/cogs/code_snippets.py index dda4d185f..d5424ea15 100644 --- a/bot/cogs/code_snippets.py +++ b/bot/cogs/code_snippets.py @@ -176,7 +176,7 @@ BITBUCKET_RE = re.compile( class CodeSnippets(Cog): """ - Cog that prints out snippets to Discord. + Cog that parses and sends code snippets to Discord. Matches each message against a regex and prints the contents of all matched snippets. """ -- cgit v1.2.3 From fd0bbdcd80156a443e5b91ad4b7f74e2c0285242 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 27 Oct 2020 11:19:56 +0200 Subject: Split up refs into branches and tags --- bot/cogs/code_snippets.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/cogs/code_snippets.py b/bot/cogs/code_snippets.py index dda4d185f..77c0ede42 100644 --- a/bot/cogs/code_snippets.py +++ b/bot/cogs/code_snippets.py @@ -39,9 +39,9 @@ async def fetch_github_snippet(session: ClientSession, repo: str, headers = {'Accept': 'application/vnd.github.v3.raw'} # Search the GitHub API for the specified branch - refs = (await fetch_response(session, f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) - + await fetch_response(session, f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers)) - + branches = await fetch_response(session, f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) + tags = await fetch_response(session, 
f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers) + refs = branches + tags ref, file_path = find_ref(path, refs) file_contents = await fetch_response( @@ -83,9 +83,9 @@ async def fetch_gitlab_snippet(session: ClientSession, repo: str, enc_repo = quote_plus(repo) # Searches the GitLab API for the specified branch - refs = (await fetch_response(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/branches', 'json') - + await fetch_response(session, f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/tags', 'json')) - + branches = await fetch_response(session, f'https://api.github.com/repos/{repo}/branches', 'json') + tags = await fetch_response(session, f'https://api.github.com/repos/{repo}/tags', 'json') + refs = branches + tags ref, file_path = find_ref(path, refs) enc_ref = quote_plus(ref) enc_file_path = quote_plus(file_path) -- cgit v1.2.3 From 7807939084f01fed327ff2d1772fb81efc0edbba Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 27 Oct 2020 15:34:52 +0200 Subject: Made check for valid language easier to read --- bot/exts/info/code_snippets.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 3d38ef1c3..c53c28e8b 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -144,7 +144,9 @@ def snippet_to_codeblock(file_contents: str, file_path: str, start_line: str, en # Extracts the code language and checks whether it's a "valid" language language = file_path.split('/')[-1].split('.')[-1] - if not language.replace('-', '').replace('+', '').replace('_', '').isalnum(): + trimmed_language = language.replace('-', '').replace('+', '').replace('_', '') + is_valid_language = trimmed_language.isalnum() + if not is_valid_language: language = '' if len(required) != 0: -- cgit v1.2.3 From 76afc563ac73f6b8d40194c15e28f42a9fe6be0f Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 27 Oct 2020 15:45:09 +0200 Subject: 
Moved global functions into the cog and got rid of unnecessary aiohttp sessions --- bot/exts/info/code_snippets.py | 307 +++++++++++++++++++++-------------------- 1 file changed, 158 insertions(+), 149 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index c53c28e8b..12eb692d4 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -2,7 +2,6 @@ import re import textwrap from urllib.parse import quote_plus -from aiohttp import ClientSession from discord import Message from discord.ext.commands import Cog @@ -10,150 +9,6 @@ from bot.bot import Bot from bot.utils.messages import wait_for_deletion -async def fetch_response(session: ClientSession, url: str, response_format: str, **kwargs) -> str: - """Makes http requests using aiohttp.""" - async with session.get(url, **kwargs) as response: - if response_format == 'text': - return await response.text() - elif response_format == 'json': - return await response.json() - - -def find_ref(path: str, refs: tuple) -> tuple: - """Loops through all branches and tags to find the required ref.""" - # Base case: there is no slash in the branch name - ref = path.split('/')[0] - file_path = '/'.join(path.split('/')[1:]) - # In case there are slashes in the branch name, we loop through all branches and tags - for possible_ref in refs: - if path.startswith(possible_ref['name'] + '/'): - ref = possible_ref['name'] - file_path = path[len(ref) + 1:] - break - return (ref, file_path) - - -async def fetch_github_snippet(session: ClientSession, repo: str, - path: str, start_line: str, end_line: str) -> str: - """Fetches a snippet from a GitHub repo.""" - headers = {'Accept': 'application/vnd.github.v3.raw'} - - # Search the GitHub API for the specified branch - branches = await fetch_response(session, f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) - tags = await fetch_response(session, f'https://api.github.com/repos/{repo}/tags', 'json', 
headers=headers) - refs = branches + tags - ref, file_path = find_ref(path, refs) - - file_contents = await fetch_response( - session, - f'https://api.github.com/repos/{repo}/contents/{file_path}?ref={ref}', - 'text', - headers=headers, - ) - return snippet_to_codeblock(file_contents, file_path, start_line, end_line) - - -async def fetch_github_gist_snippet(session: ClientSession, gist_id: str, revision: str, - file_path: str, start_line: str, end_line: str) -> str: - """Fetches a snippet from a GitHub gist.""" - headers = {'Accept': 'application/vnd.github.v3.raw'} - - gist_json = await fetch_response( - session, - f'https://api.github.com/gists/{gist_id}{f"/{revision}" if len(revision) > 0 else ""}', - 'json', - headers=headers, - ) - - # Check each file in the gist for the specified file - for gist_file in gist_json['files']: - if file_path == gist_file.lower().replace('.', '-'): - file_contents = await fetch_response( - session, - gist_json['files'][gist_file]['raw_url'], - 'text', - ) - return snippet_to_codeblock(file_contents, gist_file, start_line, end_line) - return '' - - -async def fetch_gitlab_snippet(session: ClientSession, repo: str, - path: str, start_line: str, end_line: str) -> str: - """Fetches a snippet from a GitLab repo.""" - enc_repo = quote_plus(repo) - - # Searches the GitLab API for the specified branch - branches = await fetch_response(session, f'https://api.github.com/repos/{repo}/branches', 'json') - tags = await fetch_response(session, f'https://api.github.com/repos/{repo}/tags', 'json') - refs = branches + tags - ref, file_path = find_ref(path, refs) - enc_ref = quote_plus(ref) - enc_file_path = quote_plus(file_path) - - file_contents = await fetch_response( - session, - f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/files/{enc_file_path}/raw?ref={enc_ref}', - 'text', - ) - return snippet_to_codeblock(file_contents, file_path, start_line, end_line) - - -async def fetch_bitbucket_snippet(session: ClientSession, repo: str, 
ref: str, - file_path: str, start_line: int, end_line: int) -> str: - """Fetches a snippet from a BitBucket repo.""" - file_contents = await fetch_response( - session, - f'https://bitbucket.org/{quote_plus(repo)}/raw/{quote_plus(ref)}/{quote_plus(file_path)}', - 'text', - ) - return snippet_to_codeblock(file_contents, file_path, start_line, end_line) - - -def snippet_to_codeblock(file_contents: str, file_path: str, start_line: str, end_line: str) -> str: - """ - Given the entire file contents and target lines, creates a code block. - - First, we split the file contents into a list of lines and then keep and join only the required - ones together. - - We then dedent the lines to look nice, and replace all ` characters with `\u200b to prevent - markdown injection. - - Finally, we surround the code with ``` characters. - """ - # Parse start_line and end_line into integers - if end_line is None: - start_line = end_line = int(start_line) - else: - start_line = int(start_line) - end_line = int(end_line) - - split_file_contents = file_contents.splitlines() - - # Make sure that the specified lines are in range - if start_line > end_line: - start_line, end_line = end_line, start_line - if start_line > len(split_file_contents) or end_line < 1: - return '' - start_line = max(1, start_line) - end_line = min(len(split_file_contents), end_line) - - # Gets the code lines, dedents them, and inserts zero-width spaces to prevent Markdown injection - required = '\n'.join(split_file_contents[start_line - 1:end_line]) - required = textwrap.dedent(required).rstrip().replace('`', '`\u200b') - - # Extracts the code language and checks whether it's a "valid" language - language = file_path.split('/')[-1].split('.')[-1] - trimmed_language = language.replace('-', '').replace('+', '').replace('_', '') - is_valid_language = trimmed_language.isalnum() - if not is_valid_language: - language = '' - - if len(required) != 0: - return f'```{language}\n{required}```\n' - return '' - - GITHUB_RE = 
re.compile( r'https://github\.com/(?P.+?)/blob/(?P.+/.+)' r'#L(?P\d+)([-~]L(?P\d+))?\b' @@ -183,6 +38,160 @@ class CodeSnippets(Cog): Matches each message against a regex and prints the contents of all matched snippets. """ + async def _fetch_response(self, url: str, response_format: str, **kwargs) -> str: + """Makes http requests using aiohttp.""" + async with self.bot.http_session.get(url, **kwargs) as response: + if response_format == 'text': + return await response.text() + elif response_format == 'json': + return await response.json() + + def _find_ref(self, path: str, refs: tuple) -> tuple: + """Loops through all branches and tags to find the required ref.""" + # Base case: there is no slash in the branch name + ref = path.split('/')[0] + file_path = '/'.join(path.split('/')[1:]) + # In case there are slashes in the branch name, we loop through all branches and tags + for possible_ref in refs: + if path.startswith(possible_ref['name'] + '/'): + ref = possible_ref['name'] + file_path = path[len(ref) + 1:] + break + return (ref, file_path) + + async def _fetch_github_snippet( + self, + repo: str, + path: str, + start_line: str, + end_line: str + ) -> str: + """Fetches a snippet from a GitHub repo.""" + headers = {'Accept': 'application/vnd.github.v3.raw'} + + # Search the GitHub API for the specified branch + branches = await self._fetch_response(f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) + tags = await self._fetch_response(f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers) + refs = branches + tags + ref, file_path = self._find_ref(path, refs) + + file_contents = await self._fetch_response( + f'https://api.github.com/repos/{repo}/contents/{file_path}?ref={ref}', + 'text', + headers=headers, + ) + return self._snippet_to_codeblock(file_contents, file_path, start_line, end_line) + + async def _fetch_github_gist_snippet( + self, + gist_id: str, + revision: str, + file_path: str, + start_line: str, + end_line: str + 
) -> str: + """Fetches a snippet from a GitHub gist.""" + headers = {'Accept': 'application/vnd.github.v3.raw'} + + gist_json = await self._fetch_response( + f'https://api.github.com/gists/{gist_id}{f"/{revision}" if len(revision) > 0 else ""}', + 'json', + headers=headers, + ) + + # Check each file in the gist for the specified file + for gist_file in gist_json['files']: + if file_path == gist_file.lower().replace('.', '-'): + file_contents = await self._fetch_response( + gist_json['files'][gist_file]['raw_url'], + 'text', + ) + return self._snippet_to_codeblock(file_contents, gist_file, start_line, end_line) + return '' + + async def _fetch_gitlab_snippet( + self, + repo: str, + path: str, + start_line: str, + end_line: str + ) -> str: + """Fetches a snippet from a GitLab repo.""" + enc_repo = quote_plus(repo) + + # Searches the GitLab API for the specified branch + branches = await self._fetch_response(f'https://api.github.com/repos/{repo}/branches', 'json') + tags = await self._fetch_response(f'https://api.github.com/repos/{repo}/tags', 'json') + refs = branches + tags + ref, file_path = self._find_ref(path, refs) + enc_ref = quote_plus(ref) + enc_file_path = quote_plus(file_path) + + file_contents = await self._fetch_response( + f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/files/{enc_file_path}/raw?ref={enc_ref}', + 'text', + ) + return self._snippet_to_codeblock(file_contents, file_path, start_line, end_line) + + async def _fetch_bitbucket_snippet( + self, + repo: str, + ref: str, + file_path: str, + start_line: int, + end_line: int + ) -> str: + """Fetches a snippet from a BitBucket repo.""" + file_contents = await self._fetch_response( + f'https://bitbucket.org/{quote_plus(repo)}/raw/{quote_plus(ref)}/{quote_plus(file_path)}', + 'text', + ) + return self._snippet_to_codeblock(file_contents, file_path, start_line, end_line) + + def _snippet_to_codeblock(self, file_contents: str, file_path: str, start_line: str, end_line: str) -> str: + """ + 
Given the entire file contents and target lines, creates a code block. + + First, we split the file contents into a list of lines and then keep and join only the required + ones together. + + We then dedent the lines to look nice, and replace all ` characters with `\u200b to prevent + markdown injection. + + Finally, we surround the code with ``` characters. + """ + # Parse start_line and end_line into integers + if end_line is None: + start_line = end_line = int(start_line) + else: + start_line = int(start_line) + end_line = int(end_line) + + split_file_contents = file_contents.splitlines() + + # Make sure that the specified lines are in range + if start_line > end_line: + start_line, end_line = end_line, start_line + if start_line > len(split_file_contents) or end_line < 1: + return '' + start_line = max(1, start_line) + end_line = min(len(split_file_contents), end_line) + + # Gets the code lines, dedents them, and inserts zero-width spaces to prevent Markdown injection + required = '\n'.join(split_file_contents[start_line - 1:end_line]) + required = textwrap.dedent(required).rstrip().replace('`', '`\u200b') + + # Extracts the code language and checks whether it's a "valid" language + language = file_path.split('/')[-1].split('.')[-1] + trimmed_language = language.replace('-', '').replace('+', '').replace('_', '') + is_valid_language = trimmed_language.isalnum() + if not is_valid_language: + language = '' + + if len(required) != 0: + return f'```{language}\n{required}```\n' + return '' + def __init__(self, bot: Bot): """Initializes the cog's bot.""" self.bot = bot @@ -199,16 +208,16 @@ class CodeSnippets(Cog): message_to_send = '' for gh in GITHUB_RE.finditer(message.content): - message_to_send += await fetch_github_snippet(self.bot.http_session, **gh.groupdict()) + message_to_send += await self._fetch_github_snippet(**gh.groupdict()) for gh_gist in GITHUB_GIST_RE.finditer(message.content): - message_to_send += await 
fetch_github_gist_snippet(self.bot.http_session, **gh_gist.groupdict()) + message_to_send += await self._fetch_github_gist_snippet(**gh_gist.groupdict()) for gl in GITLAB_RE.finditer(message.content): - message_to_send += await fetch_gitlab_snippet(self.bot.http_session, **gl.groupdict()) + message_to_send += await self._fetch_gitlab_snippet(**gl.groupdict()) for bb in BITBUCKET_RE.finditer(message.content): - message_to_send += await fetch_bitbucket_snippet(self.bot.http_session, **bb.groupdict()) + message_to_send += await self._fetch_bitbucket_snippet(**bb.groupdict()) if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15: await message.edit(suppress=True) -- cgit v1.2.3 From 3102c698e8892d5a3b1b0fcc2183bf2c480d60fd Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 27 Oct 2020 15:55:34 +0200 Subject: Used a list of tuples for on_message instead --- bot/exts/info/code_snippets.py | 29 +++++++++++------------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 12eb692d4..1bb00b677 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -199,25 +199,18 @@ class CodeSnippets(Cog): @Cog.listener() async def on_message(self, message: Message) -> None: """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" - gh_match = GITHUB_RE.search(message.content) - gh_gist_match = GITHUB_GIST_RE.search(message.content) - gl_match = GITLAB_RE.search(message.content) - bb_match = BITBUCKET_RE.search(message.content) - - if (gh_match or gh_gist_match or gl_match or bb_match) and not message.author.bot: + if not message.author.bot: message_to_send = '' - - for gh in GITHUB_RE.finditer(message.content): - message_to_send += await self._fetch_github_snippet(**gh.groupdict()) - - for gh_gist in GITHUB_GIST_RE.finditer(message.content): - message_to_send += await 
self._fetch_github_gist_snippet(**gh_gist.groupdict()) - - for gl in GITLAB_RE.finditer(message.content): - message_to_send += await self._fetch_gitlab_snippet(**gl.groupdict()) - - for bb in BITBUCKET_RE.finditer(message.content): - message_to_send += await self._fetch_bitbucket_snippet(**bb.groupdict()) + pattern_handlers = [ + (GITHUB_RE, self._fetch_github_snippet), + (GITHUB_GIST_RE, self._fetch_github_gist_snippet), + (GITLAB_RE, self._fetch_gitlab_snippet), + (BITBUCKET_RE, self._fetch_bitbucket_snippet) + ] + + for pattern, handler in pattern_handlers: + for match in pattern.finditer(message.content): + message_to_send += await handler(**match.groupdict()) if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15: await message.edit(suppress=True) -- cgit v1.2.3 From bbf7a600ca4b657258b46074c00cab1982791613 Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Wed, 28 Oct 2020 09:26:09 +0200 Subject: Update bot/exts/info/code_snippets.py Co-authored-by: Mark --- bot/exts/info/code_snippets.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 1bb00b677..4594c36f2 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -49,8 +49,7 @@ class CodeSnippets(Cog): def _find_ref(self, path: str, refs: tuple) -> tuple: """Loops through all branches and tags to find the required ref.""" # Base case: there is no slash in the branch name - ref = path.split('/')[0] - file_path = '/'.join(path.split('/')[1:]) + ref, file_path = path.split('/', 1) # In case there are slashes in the branch name, we loop through all branches and tags for possible_ref in refs: if path.startswith(possible_ref['name'] + '/'): -- cgit v1.2.3 From 1b8610c83dacfe1b19f3efa5d3a2b66c4c6e1e5d Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Wed, 28 Oct 2020 09:31:01 +0200 Subject: 
Removed unnecessary space before equals sign --- bot/exts/info/code_snippets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 4594c36f2..d854ebb4c 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -49,7 +49,7 @@ class CodeSnippets(Cog): def _find_ref(self, path: str, refs: tuple) -> tuple: """Loops through all branches and tags to find the required ref.""" # Base case: there is no slash in the branch name - ref, file_path = path.split('/', 1) + ref, file_path = path.split('/', 1) # In case there are slashes in the branch name, we loop through all branches and tags for possible_ref in refs: if path.startswith(possible_ref['name'] + '/'): -- cgit v1.2.3 From aae80011f5cb7e1ec5b9d6fd648ba255ad30e0df Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 30 Oct 2020 05:31:09 +0200 Subject: Added defcon status notifier --- bot/exts/moderation/defcon.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index caa6fb917..4b25c36df 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -4,8 +4,10 @@ import logging from collections import namedtuple from datetime import datetime, timedelta from enum import Enum +from gettext import ngettext from discord import Colour, Embed, Member +from discord.ext import tasks from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot @@ -83,6 +85,7 @@ class Defcon(Cog): self.days = timedelta(days=0) log.info("DEFCON disabled") + self.update_notifier() await self.update_channel_topic() @Cog.listener() @@ -153,6 +156,10 @@ class Defcon(Cog): } } ) + + self.days = timedelta(days=days) + self.update_notifier() + except Exception as err: log.exception("Unable to update DEFCON settings.") error = err @@ -199,7 +206,6 @@ class Defcon(Cog): @has_any_role(*MODERATION_ROLES) 
async def days_command(self, ctx: Context, days: int) -> None: """Set how old an account must be to join the server, in days, with DEFCON mode enabled.""" - self.days = timedelta(days=days) self.enabled = True await self._defcon_action(ctx, days=days, action=Action.UPDATED) await self.update_channel_topic() @@ -252,6 +258,21 @@ class Defcon(Cog): await self.mod_log.send_log_message(info.icon, info.color, status_msg, log_msg) + def update_notifier(self) -> None: + """Start or stop the notifier according to the DEFCON status.""" + if self.days.days != 0 and not self.defcon_notifier.is_running(): + log.info("DEFCON notifier started.") + self.defcon_notifier.start() + + elif self.days.days == 0 and self.defcon_notifier.is_running(): + log.info("DEFCON notifier stopped.") + self.defcon_notifier.cancel() + + @tasks.loop(hours=1) + async def defcon_notifier(self) -> None: + """Routinely notify moderators that DEFCON is active.""" + await self.channel.send(f"Defcon is on and is set to {self.days.days} day{ngettext('', 's', self.days.days)}.") + def setup(bot: Bot) -> None: """Load the Defcon cog.""" -- cgit v1.2.3 From c9fe7b1d6b98334c29f516b682b93b4c1c3946a1 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 01:14:31 +0100 Subject: Cache user fetched symbols through redis. 
--- bot/exts/info/doc/_cog.py | 22 ++++++++++++++++++++-- bot/exts/info/doc/_redis_cache.py | 23 +++++++++++++++++++++++ 2 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 bot/exts/info/doc/_redis_cache.py diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 257435e95..ab3ad159a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -4,6 +4,7 @@ import asyncio import logging import re import sys +import urllib.parse from collections import defaultdict from contextlib import suppress from typing import Dict, List, NamedTuple, Optional, Union @@ -21,6 +22,7 @@ from bot.utils.messages import wait_for_deletion from bot.utils.scheduling import Scheduler from ._inventory_parser import FAILED_REQUEST_ATTEMPTS, fetch_inventory from ._parsing import get_symbol_markdown +from ._redis_cache import DocRedisCache log = logging.getLogger(__name__) @@ -182,6 +184,8 @@ class InventoryURL(commands.Converter): class DocCog(commands.Cog): """A set of commands for querying & displaying documentation.""" + doc_cache = DocRedisCache() + def __init__(self, bot: Bot): self.base_urls = {} self.bot = bot @@ -296,16 +300,30 @@ class DocCog(commands.Cog): Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. If the symbol is known, an Embed with documentation about it is returned. + + First check the DocRedisCache before querying the cog's `CachedParser`, + if not present also create a redis entry for the symbol. 
""" + log.trace(f"Building embed for symbol `{symbol}`") symbol_info = self.doc_symbols.get(symbol) if symbol_info is None: + log.debug("Symbol does not exist.") return None self.bot.stats.incr(f"doc_fetches.{symbol_info.package.lower()}") + item_url = f"{symbol_info.url}#{symbol_info.symbol_id}" + redis_key = "".join(urllib.parse.urlparse(item_url)[1:]) # url without scheme + + markdown = await self.doc_cache.get(redis_key) + if markdown is None: + log.debug(f"Redis cache miss for symbol `{symbol}`.") + markdown = await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info) + await self.doc_cache.set(redis_key, markdown) + embed = discord.Embed( title=discord.utils.escape_markdown(symbol), - url=f"{symbol_info.url}#{symbol_info.symbol_id}", - description=await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info) + url=item_url, + description=markdown ) # Show all symbols with the same name that were renamed in the footer. embed.set_footer( diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py new file mode 100644 index 000000000..147394ba6 --- /dev/null +++ b/bot/exts/info/doc/_redis_cache.py @@ -0,0 +1,23 @@ +from typing import Optional + +from async_rediscache.types.base import RedisObject, namespace_lock + + +class DocRedisCache(RedisObject): + """Interface for redis functionality needed by the Doc cog.""" + + @namespace_lock + async def set(self, key: str, value: str) -> None: + """ + Set markdown `value` for `key`. + + Keys expire after a week to keep data up to date. 
+ """
+ with await self._get_pool_connection() as connection:
+ await connection.setex(f"{self.namespace}:{key}", 7*24*60*60, value)
+
+ @namespace_lock
+ async def get(self, key: str) -> Optional[str]:
+ """Get markdown contents for `key`."""
+ with await self._get_pool_connection() as connection:
+ return await connection.get(f"{self.namespace}:{key}", encoding="utf8")
-- cgit v1.2.3 From b8c12d08c9b8dc4e0bf39fcc242d67a3532d0fd0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 03:16:35 +0100 Subject: Add package in front of symbol as default fallback Previously we only added the package name for symbols that shared a renamed name with another symbol, but in some edge cases we can get to this point with symbols that weren't renamed but have name conflicts, causing some to get overwritten completely without the capturing condition --- bot/exts/info/doc/_cog.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ab3ad159a..264d6e31e 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -260,8 +260,7 @@ class DocCog(commands.Cog): self.doc_symbols[overridden_symbol] = original_symbol self.renamed_symbols.add(overridden_symbol) - # If renamed `symbol` already exists, add library name in front to differentiate between them.
- elif symbol in self.renamed_symbols: + else: symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) -- cgit v1.2.3 From 89169f5c0b203be1963cfe569c216e0094674c4f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 03:56:29 +0100 Subject: Simplify duplicate symbol name handling code With the catchall else condition and symbols from FORCE_PREFIX_GROUPS getting renamed even when being overwritten, we can ignore the package handling and let it go to the else which adds the package prefix instead of a group --- bot/exts/info/doc/_cog.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 264d6e31e..ee89f5384 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -26,17 +26,14 @@ from ._redis_cache import DocRedisCache log = logging.getLogger(__name__) -NO_OVERRIDE_GROUPS = ( +# symbols with a group contained here will get the group prefixed on duplicates +FORCE_PREFIX_GROUPS = ( "2to3fixer", "token", "label", "pdbcommand", "term", ) -NO_OVERRIDE_PACKAGES = ( - "python", -) - WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay @@ -245,14 +242,11 @@ class DocCog(commands.Cog): group_name = sys.intern(group.split(":")[1]) if (original_symbol := self.doc_symbols.get(symbol)) is not None: - if ( - group_name in NO_OVERRIDE_GROUPS - or any(package == original_symbol.package for package in NO_OVERRIDE_PACKAGES) - ): + if group_name in FORCE_PREFIX_GROUPS: symbol = f"{group_name}.{symbol}" self.renamed_symbols.add(symbol) - elif (overridden_symbol_group := original_symbol.group) in NO_OVERRIDE_GROUPS: + elif (overridden_symbol_group := original_symbol.group) in FORCE_PREFIX_GROUPS: overridden_symbol = f"{overridden_symbol_group}.{symbol}" if overridden_symbol in self.renamed_symbols: overridden_symbol = 
f"{api_package_name}.{overridden_symbol}" -- cgit v1.2.3 From faaa85d2d00a2bc7496965fad3f5f53f56718e9c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 04:03:23 +0100 Subject: Move InventoryURL converer to the converters file --- bot/converters.py | 20 ++++++++++++++++++++ bot/exts/info/doc/_cog.py | 23 ++--------------------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index 6c87a50fe..3066eaabb 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -15,6 +15,7 @@ from discord.utils import DISCORD_EPOCH, snowflake_time from bot.api import ResponseCodeError from bot.constants import URLs +from bot.exts.info.doc import _inventory_parser from bot.utils.regex import INVITE_RE log = logging.getLogger(__name__) @@ -175,6 +176,25 @@ class ValidURL(Converter): return url +class InventoryURL(Converter): + """ + Represents an Intersphinx inventory URL. + + This converter checks whether intersphinx accepts the given inventory URL, and raises + `BadArgument` if that is not the case. + + Otherwise, it simply passes through the given URL. + """ + + @staticmethod + async def convert(ctx: Context, url: str) -> str: + """Convert url to Intersphinx inventory URL.""" + await ctx.trigger_typing() + if await _inventory_parser.fetch_inventory(ctx.bot.http_session, url) is None: + raise BadArgument(f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS}.") + return url + + class Snowflake(IDConverter): """ Converts to an int if the argument is a valid Discord snowflake. 
diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ee89f5384..25477fe07 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -16,11 +16,11 @@ from discord.ext import commands from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput -from bot.converters import PackageName, ValidURL +from bot.converters import InventoryURL, PackageName, ValidURL from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion from bot.utils.scheduling import Scheduler -from ._inventory_parser import FAILED_REQUEST_ATTEMPTS, fetch_inventory +from ._inventory_parser import fetch_inventory from ._parsing import get_symbol_markdown from ._redis_cache import DocRedisCache @@ -159,25 +159,6 @@ class CachedParser: self._item_events.clear() -class InventoryURL(commands.Converter): - """ - Represents an Intersphinx inventory URL. - - This converter checks whether intersphinx accepts the given inventory URL, and raises - `BadArgument` if that is not the case. - - Otherwise, it simply passes through the given URL. 
- """ - - @staticmethod - async def convert(ctx: commands.Context, url: str) -> str: - """Convert url to Intersphinx inventory URL.""" - await ctx.trigger_typing() - if await fetch_inventory(ctx.bot.http_session, url) is None: - raise commands.BadArgument(f"Failed to fetch inventory file after {FAILED_REQUEST_ATTEMPTS}.") - return url - - class DocCog(commands.Cog): """A set of commands for querying & displaying documentation.""" -- cgit v1.2.3 From 2836ce6f24d66949376a1defbf3813ffae8b7f47 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 13:45:43 +0100 Subject: Relock Pipfile.lock --- Pipfile.lock | 434 +++++++++++++++++++---------------------------------------- 1 file changed, 136 insertions(+), 298 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index becd85c55..f622d9e01 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "073fd0c51749aafa188fdbe96c5b90dd157cb1d23bdd144801fb0d0a369ffa88" + "sha256": "35130d225126e341941fe36e4193fe53aa253e193a50505054a87f48ab7f7c8c" }, "pipfile-spec": 6, "requires": { @@ -34,21 +34,22 @@ }, "aiohttp": { "hashes": [ - "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e", - "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326", - "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a", - "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654", - "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a", - "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4", - "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17", - "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec", - "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd", - "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48", - 
"sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59", - "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965" + "sha256:1a4160579ffbc1b69e88cb6ca8bb0fbd4947dfcbf9fb1e2a4fc4c7a4a986c1fe", + "sha256:206c0ccfcea46e1bddc91162449c20c72f308aebdcef4977420ef329c8fcc599", + "sha256:2ad493de47a8f926386fa6d256832de3095ba285f325db917c7deae0b54a9fc8", + "sha256:319b490a5e2beaf06891f6711856ea10591cfe84fe9f3e71a721aa8f20a0872a", + "sha256:470e4c90da36b601676fe50c49a60d34eb8c6593780930b1aa4eea6f508dfa37", + "sha256:60f4caa3b7f7a477f66ccdd158e06901e1d235d572283906276e3803f6b098f5", + "sha256:66d64486172b032db19ea8522328b19cfb78a3e1e5b62ab6a0567f93f073dea0", + "sha256:687461cd974722110d1763b45c5db4d2cdee8d50f57b00c43c7590d1dd77fc5c", + "sha256:698cd7bc3c7d1b82bb728bae835724a486a8c376647aec336aa21a60113c3645", + "sha256:797456399ffeef73172945708810f3277f794965eb6ec9bd3a0c007c0476be98", + "sha256:a885432d3cabc1287bcf88ea94e1826d3aec57fd5da4a586afae4591b061d40d", + "sha256:c506853ba52e516b264b106321c424d03f3ddef2813246432fa9d1cefd361c81", + "sha256:fb83326d8295e8840e4ba774edf346e87eca78ba8a89c55d2690352842c15ba5" ], "index": "pypi", - "version": "==3.6.2" + "version": "==3.6.3" }, "aioping": { "hashes": [ @@ -68,18 +69,11 @@ }, "aiormq": { "hashes": [ - "sha256:106695a836f19c1af6c46b58e8aac80e00f86c5b3287a3c6483a1ee369cc95c9", - "sha256:9f6dbf6155fe2b7a3d24bf68de97fb812db0fac0a54e96bc1af14ea95078ba7f" + "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573", + "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e" ], "markers": "python_version >= '3.6'", - "version": "==3.2.3" - }, - "alabaster": { - "hashes": [ - "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", - "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" - ], - "version": "==0.7.12" + "version": "==3.3.1" }, "async-rediscache": { "extras": [ @@ -103,35 +97,27 @@ }, "attrs": { "hashes": [ - 
"sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594", - "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.2.0" - }, - "babel": { - "hashes": [ - "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38", - "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.0" + "version": "==20.3.0" }, "beautifulsoup4": { "hashes": [ - "sha256:1edf5e39f3a5bc6e38b235b369128416c7239b34f692acccececb040233032a1", - "sha256:5dfe44f8fddc89ac5453f02659d3ab1668f2c0d9684839f0785037e8c6d9ac8d", - "sha256:645d833a828722357038299b7f6879940c11dddd95b900fe5387c258b72bb883" + "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35", + "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25", + "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666" ], "index": "pypi", - "version": "==4.9.2" + "version": "==4.9.3" }, "certifi": { "hashes": [ - "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", - "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41" + "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd", + "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4" ], - "version": "==2020.6.20" + "version": "==2020.11.8" }, "cffi": { "hashes": [ @@ -183,11 +169,12 @@ }, "colorama": { "hashes": [ - "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff", - "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1" + "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", + 
"sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], + "index": "pypi", "markers": "sys_platform == 'win32'", - "version": "==0.4.3" + "version": "==0.4.4" }, "coloredlogs": { "hashes": [ @@ -207,26 +194,18 @@ }, "discord.py": { "hashes": [ - "sha256:3acb61fde0d862ed346a191d69c46021e6063673f63963bc984ae09a685ab211", - "sha256:e71089886aa157341644bdecad63a72ff56b44406b1a6467b66db31c8e5a5a15" + "sha256:2367359e31f6527f8a936751fc20b09d7495dd6a76b28c8fb13d4ca6c55b7563", + "sha256:def00dc50cf36d21346d71bc89f0cad8f18f9a3522978dc18c7796287d47de8b" ], "index": "pypi", - "version": "==1.5.0" - }, - "docutils": { - "hashes": [ - "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", - "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.16" + "version": "==1.5.1" }, "fakeredis": { "hashes": [ - "sha256:7ea0866ba5edb40fe2e9b1722535df0c7e6b91d518aa5f50d96c2fff3ea7f4c2", - "sha256:aad8836ffe0319ffbba66dcf872ac6e7e32d1f19790e31296ba58445efb0a5c7" + "sha256:8070b7fce16f828beaef2c757a4354af91698685d5232404f1aeeb233529c7a5", + "sha256:f8c8ea764d7b6fd801e7f5486e3edd32ca991d506186f1923a01fc072e33c271" ], - "version": "==1.4.3" + "version": "==1.4.4" }, "feedparser": { "hashes": [ @@ -313,58 +292,48 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, - "imagesize": { - "hashes": [ - "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", - "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.2.0" - }, - "jinja2": { - "hashes": [ - "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", - "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" - ], - "markers": 
"python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==2.11.2" - }, "lxml": { "hashes": [ - "sha256:05a444b207901a68a6526948c7cc8f9fe6d6f24c70781488e32fd74ff5996e3f", - "sha256:08fc93257dcfe9542c0a6883a25ba4971d78297f63d7a5a26ffa34861ca78730", - "sha256:107781b213cf7201ec3806555657ccda67b1fccc4261fb889ef7fc56976db81f", - "sha256:121b665b04083a1e85ff1f5243d4a93aa1aaba281bc12ea334d5a187278ceaf1", - "sha256:1fa21263c3aba2b76fd7c45713d4428dbcc7644d73dcf0650e9d344e433741b3", - "sha256:2b30aa2bcff8e958cd85d907d5109820b01ac511eae5b460803430a7404e34d7", - "sha256:4b4a111bcf4b9c948e020fd207f915c24a6de3f1adc7682a2d92660eb4e84f1a", - "sha256:5591c4164755778e29e69b86e425880f852464a21c7bb53c7ea453bbe2633bbe", - "sha256:59daa84aef650b11bccd18f99f64bfe44b9f14a08a28259959d33676554065a1", - "sha256:5a9c8d11aa2c8f8b6043d845927a51eb9102eb558e3f936df494e96393f5fd3e", - "sha256:5dd20538a60c4cc9a077d3b715bb42307239fcd25ef1ca7286775f95e9e9a46d", - "sha256:74f48ec98430e06c1fa8949b49ebdd8d27ceb9df8d3d1c92e1fdc2773f003f20", - "sha256:786aad2aa20de3dbff21aab86b2fb6a7be68064cbbc0219bde414d3a30aa47ae", - "sha256:7ad7906e098ccd30d8f7068030a0b16668ab8aa5cda6fcd5146d8d20cbaa71b5", - "sha256:80a38b188d20c0524fe8959c8ce770a8fdf0e617c6912d23fc97c68301bb9aba", - "sha256:8f0ec6b9b3832e0bd1d57af41f9238ea7709bbd7271f639024f2fc9d3bb01293", - "sha256:92282c83547a9add85ad658143c76a64a8d339028926d7dc1998ca029c88ea6a", - "sha256:94150231f1e90c9595ccc80d7d2006c61f90a5995db82bccbca7944fd457f0f6", - "sha256:9dc9006dcc47e00a8a6a029eb035c8f696ad38e40a27d073a003d7d1443f5d88", - "sha256:a76979f728dd845655026ab991df25d26379a1a8fc1e9e68e25c7eda43004bed", - "sha256:aa8eba3db3d8761db161003e2d0586608092e217151d7458206e243be5a43843", - "sha256:bea760a63ce9bba566c23f726d72b3c0250e2fa2569909e2d83cda1534c79443", - "sha256:c3f511a3c58676147c277eff0224c061dd5a6a8e1373572ac817ac6324f1b1e0", - "sha256:c9d317efde4bafbc1561509bfa8a23c5cab66c44d49ab5b63ff690f5159b2304", - 
"sha256:cc411ad324a4486b142c41d9b2b6a722c534096963688d879ea6fa8a35028258", - "sha256:cdc13a1682b2a6241080745b1953719e7fe0850b40a5c71ca574f090a1391df6", - "sha256:cfd7c5dd3c35c19cec59c63df9571c67c6d6e5c92e0fe63517920e97f61106d1", - "sha256:e1cacf4796b20865789083252186ce9dc6cc59eca0c2e79cca332bdff24ac481", - "sha256:e70d4e467e243455492f5de463b72151cc400710ac03a0678206a5f27e79ddef", - "sha256:ecc930ae559ea8a43377e8b60ca6f8d61ac532fc57efb915d899de4a67928efd", - "sha256:f161af26f596131b63b236372e4ce40f3167c1b5b5d459b29d2514bd8c9dc9ee" - ], - "index": "pypi", - "version": "==4.5.2" + "sha256:098fb713b31050463751dcc694878e1d39f316b86366fb9fe3fbbe5396ac9fab", + "sha256:0e89f5d422988c65e6936e4ec0fe54d6f73f3128c80eb7ecc3b87f595523607b", + "sha256:189ad47203e846a7a4951c17694d845b6ade7917c47c64b29b86526eefc3adf5", + "sha256:1d87936cb5801c557f3e981c9c193861264c01209cb3ad0964a16310ca1b3301", + "sha256:211b3bcf5da70c2d4b84d09232534ad1d78320762e2c59dedc73bf01cb1fc45b", + "sha256:2358809cc64394617f2719147a58ae26dac9e21bae772b45cfb80baa26bfca5d", + "sha256:23c83112b4dada0b75789d73f949dbb4e8f29a0a3511647024a398ebd023347b", + "sha256:24e811118aab6abe3ce23ff0d7d38932329c513f9cef849d3ee88b0f848f2aa9", + "sha256:2d5896ddf5389560257bbe89317ca7bcb4e54a02b53a3e572e1ce4226512b51b", + "sha256:2d6571c48328be4304aee031d2d5046cbc8aed5740c654575613c5a4f5a11311", + "sha256:2e311a10f3e85250910a615fe194839a04a0f6bc4e8e5bb5cac221344e3a7891", + "sha256:302160eb6e9764168e01d8c9ec6becddeb87776e81d3fcb0d97954dd51d48e0a", + "sha256:3a7a380bfecc551cfd67d6e8ad9faa91289173bdf12e9cfafbd2bdec0d7b1ec1", + "sha256:3d9b2b72eb0dbbdb0e276403873ecfae870599c83ba22cadff2db58541e72856", + "sha256:475325e037fdf068e0c2140b818518cf6bc4aa72435c407a798b2db9f8e90810", + "sha256:4b7572145054330c8e324a72d808c8c8fbe12be33368db28c39a255ad5f7fb51", + "sha256:4fff34721b628cce9eb4538cf9a73d02e0f3da4f35a515773cce6f5fe413b360", + "sha256:56eff8c6fb7bc4bcca395fdff494c52712b7a57486e4fbde34c31bb9da4c6cc4", + 
"sha256:573b2f5496c7e9f4985de70b9bbb4719ffd293d5565513e04ac20e42e6e5583f", + "sha256:7ecaef52fd9b9535ae5f01a1dd2651f6608e4ec9dc136fc4dfe7ebe3c3ddb230", + "sha256:803a80d72d1f693aa448566be46ffd70882d1ad8fc689a2e22afe63035eb998a", + "sha256:8862d1c2c020cb7a03b421a9a7b4fe046a208db30994fc8ff68c627a7915987f", + "sha256:9b06690224258db5cd39a84e993882a6874676f5de582da57f3df3a82ead9174", + "sha256:a71400b90b3599eb7bf241f947932e18a066907bf84617d80817998cee81e4bf", + "sha256:bb252f802f91f59767dcc559744e91efa9df532240a502befd874b54571417bd", + "sha256:be1ebf9cc25ab5399501c9046a7dcdaa9e911802ed0e12b7d620cd4bbf0518b3", + "sha256:be7c65e34d1b50ab7093b90427cbc488260e4b3a38ef2435d65b62e9fa3d798a", + "sha256:c0dac835c1a22621ffa5e5f999d57359c790c52bbd1c687fe514ae6924f65ef5", + "sha256:c152b2e93b639d1f36ec5a8ca24cde4a8eefb2b6b83668fcd8e83a67badcb367", + "sha256:d182eada8ea0de61a45a526aa0ae4bcd222f9673424e65315c35820291ff299c", + "sha256:d18331ea905a41ae71596502bd4c9a2998902328bbabd29e3d0f5f8569fabad1", + "sha256:d20d32cbb31d731def4b1502294ca2ee99f9249b63bc80e03e67e8f8e126dea8", + "sha256:d4ad7fd3269281cb471ad6c7bafca372e69789540d16e3755dd717e9e5c9d82f", + "sha256:d6f8c23f65a4bfe4300b85f1f40f6c32569822d08901db3b6454ab785d9117cc", + "sha256:d84d741c6e35c9f3e7406cb7c4c2e08474c2a6441d59322a00dcae65aac6315d", + "sha256:e65c221b2115a91035b55a593b6eb94aa1206fa3ab374f47c6dc10d364583ff9", + "sha256:f98b6f256be6cec8dd308a8563976ddaff0bdc18b730720f6f4bee927ffe926f" + ], + "index": "pypi", + "version": "==4.6.1" }, "markdownify": { "hashes": [ @@ -374,52 +343,13 @@ "index": "pypi", "version": "==0.5.3" }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - 
"sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", 
- "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.1.1" - }, "more-itertools": { "hashes": [ - "sha256:6f83822ae94818eae2612063a5101a7311e68ae8002005b5e05f03fd74a86a20", - "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c" + "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", + "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" ], "index": "pypi", - "version": "==8.5.0" + "version": "==8.6.0" }, "multidict": { "hashes": [ @@ -451,14 +381,6 @@ "markers": "python_version >= '3.5'", "version": "==4.0.2" }, - "packaging": { - "hashes": [ - "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", - "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.4" - }, "pamqp": { "hashes": [ "sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02", @@ -508,21 +430,14 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, - "pygments": { - "hashes": [ - "sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998", - "sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7" - ], - "markers": "python_version >= '3.5'", - "version": "==2.7.1" - }, - "pyparsing": { + "pyreadline": { "hashes": [ - "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", - "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1", + 
"sha256:65540c21bfe14405a3a77e4c085ecfce88724743a4ead47c66b84defcf82c32e", + "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.4.7" + "markers": "sys_platform == 'win32'", + "version": "==2.1" }, "python-dateutil": { "hashes": [ @@ -532,13 +447,6 @@ "index": "pypi", "version": "==2.8.1" }, - "pytz": { - "hashes": [ - "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed", - "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048" - ], - "version": "==2020.1" - }, "pyyaml": { "hashes": [ "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", @@ -564,21 +472,13 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, - "requests": { - "hashes": [ - "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", - "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" - ], - "index": "pypi", - "version": "==2.24.0" - }, "sentry-sdk": { "hashes": [ - "sha256:c9c0fa1412bad87104c4eee8dd36c7bbf60b0d92ae917ab519094779b22e6d9a", - "sha256:e159f7c919d19ae86e5a4ff370fccc45149fab461fbeb93fb5a735a0b33a9cb1" + "sha256:17b725df2258354ccb39618ae4ead29651aa92c01a92acf72f98efe06ee2e45a", + "sha256:9040539485226708b5cad0401d76628fba4eed9154bf301c50579767afe344fd" ], "index": "pypi", - "version": "==0.17.8" + "version": "==0.19.2" }, "six": { "hashes": [ @@ -588,19 +488,12 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, - "snowballstemmer": { - "hashes": [ - "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", - "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52" - ], - "version": "==2.0.0" - }, "sortedcontainers": { "hashes": [ - "sha256:4e73a757831fc3ca4de2859c422564239a31d8213d09a2a666e375807034d2ba", 
- "sha256:c633ebde8580f241f274c1f8994a665c0e54a17724fecd0cae2f079e09c36d3f" + "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", + "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1" ], - "version": "==2.2.2" + "version": "==2.3.0" }, "soupsieve": { "hashes": [ @@ -610,62 +503,6 @@ "markers": "python_version >= '3.0'", "version": "==2.0.1" }, - "sphinx": { - "hashes": [ - "sha256:b4c750d546ab6d7e05bdff6ac24db8ae3e8b8253a3569b754e445110a0a12b66", - "sha256:fc312670b56cb54920d6cc2ced455a22a547910de10b3142276495ced49231cb" - ], - "index": "pypi", - "version": "==2.4.4" - }, - "sphinxcontrib-applehelp": { - "hashes": [ - "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", - "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.2" - }, - "sphinxcontrib-devhelp": { - "hashes": [ - "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", - "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.2" - }, - "sphinxcontrib-htmlhelp": { - "hashes": [ - "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", - "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.3" - }, - "sphinxcontrib-jsmath": { - "hashes": [ - "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", - "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.1" - }, - "sphinxcontrib-qthelp": { - "hashes": [ - "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", - "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.3" - }, - 
"sphinxcontrib-serializinghtml": { - "hashes": [ - "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", - "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" - ], - "markers": "python_version >= '3.5'", - "version": "==1.1.4" - }, "statsd": { "hashes": [ "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa", @@ -676,34 +513,34 @@ }, "urllib3": { "hashes": [ - "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", - "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" + "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2", + "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.25.10" + "version": "==1.25.11" }, "yarl": { "hashes": [ - "sha256:04a54f126a0732af75e5edc9addeaa2113e2ca7c6fce8974a63549a70a25e50e", - "sha256:3cc860d72ed989f3b1f3abbd6ecf38e412de722fb38b8f1b1a086315cf0d69c5", - "sha256:5d84cc36981eb5a8533be79d6c43454c8e6a39ee3118ceaadbd3c029ab2ee580", - "sha256:5e447e7f3780f44f890360ea973418025e8c0cdcd7d6a1b221d952600fd945dc", - "sha256:61d3ea3c175fe45f1498af868879c6ffeb989d4143ac542163c45538ba5ec21b", - "sha256:67c5ea0970da882eaf9efcf65b66792557c526f8e55f752194eff8ec722c75c2", - "sha256:6f6898429ec3c4cfbef12907047136fd7b9e81a6ee9f105b45505e633427330a", - "sha256:7ce35944e8e61927a8f4eb78f5bc5d1e6da6d40eadd77e3f79d4e9399e263921", - "sha256:b7c199d2cbaf892ba0f91ed36d12ff41ecd0dde46cbf64ff4bfe997a3ebc925e", - "sha256:c15d71a640fb1f8e98a1423f9c64d7f1f6a3a168f803042eaf3a5b5022fde0c1", - "sha256:c22607421f49c0cb6ff3ed593a49b6a99c6ffdeaaa6c944cdda83c2393c8864d", - "sha256:c604998ab8115db802cc55cb1b91619b2831a6128a62ca7eea577fc8ea4d3131", - "sha256:d088ea9319e49273f25b1c96a3763bf19a882cff774d1792ae6fba34bd40550a", - 
"sha256:db9eb8307219d7e09b33bcb43287222ef35cbcf1586ba9472b0a4b833666ada1", - "sha256:e31fef4e7b68184545c3d68baec7074532e077bd1906b040ecfba659737df188", - "sha256:e32f0fb443afcfe7f01f95172b66f279938fbc6bdaebe294b0ff6747fb6db020", - "sha256:fcbe419805c9b20db9a51d33b942feddbf6e7fb468cb20686fd7089d4164c12a" + "sha256:040b237f58ff7d800e6e0fd89c8439b841f777dd99b4a9cca04d6935564b9409", + "sha256:17668ec6722b1b7a3a05cc0167659f6c95b436d25a36c2d52db0eca7d3f72593", + "sha256:3a584b28086bc93c888a6c2aa5c92ed1ae20932f078c46509a66dce9ea5533f2", + "sha256:4439be27e4eee76c7632c2427ca5e73703151b22cae23e64adb243a9c2f565d8", + "sha256:48e918b05850fffb070a496d2b5f97fc31d15d94ca33d3d08a4f86e26d4e7c5d", + "sha256:9102b59e8337f9874638fcfc9ac3734a0cfadb100e47d55c20d0dc6087fb4692", + "sha256:9b930776c0ae0c691776f4d2891ebc5362af86f152dd0da463a6614074cb1b02", + "sha256:b3b9ad80f8b68519cc3372a6ca85ae02cc5a8807723ac366b53c0f089db19e4a", + "sha256:bc2f976c0e918659f723401c4f834deb8a8e7798a71be4382e024bcc3f7e23a8", + "sha256:c22c75b5f394f3d47105045ea551e08a3e804dc7e01b37800ca35b58f856c3d6", + "sha256:c52ce2883dc193824989a9b97a76ca86ecd1fa7955b14f87bf367a61b6232511", + "sha256:ce584af5de8830d8701b8979b18fcf450cef9a382b1a3c8ef189bedc408faf1e", + "sha256:da456eeec17fa8aa4594d9a9f27c0b1060b6a75f2419fe0c00609587b2695f4a", + "sha256:db6db0f45d2c63ddb1a9d18d1b9b22f308e52c83638c26b422d520a815c4b3fb", + "sha256:df89642981b94e7db5596818499c4b2219028f2a528c9c37cc1de45bf2fd3a3f", + "sha256:f18d68f2be6bf0e89f1521af2b1bb46e66ab0018faafa81d70f358153170a317", + "sha256:f379b7f83f23fe12823085cd6b906edc49df969eb99757f58ff382349a3303c6" ], "markers": "python_version >= '3.5'", - "version": "==1.6.0" + "version": "==1.5.1" } }, "develop": { @@ -716,11 +553,11 @@ }, "attrs": { "hashes": [ - "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594", - "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + 
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.2.0" + "version": "==20.3.0" }, "cfgv": { "hashes": [ @@ -786,19 +623,19 @@ }, "flake8": { "hashes": [ - "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c", - "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208" + "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", + "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" ], "index": "pypi", - "version": "==3.8.3" + "version": "==3.8.4" }, "flake8-annotations": { "hashes": [ - "sha256:09fe1aa3f40cb8fef632a0ab3614050a7584bb884b6134e70cf1fc9eeee642fa", - "sha256:5bda552f074fd6e34276c7761756fa07d824ffac91ce9c0a8555eb2bc5b92d7a" + "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1", + "sha256:bebd11a850f6987a943ce8cdff4159767e0f5f89b3c88aca64680c2175ee02df" ], "index": "pypi", - "version": "==2.4.0" + "version": "==2.4.1" }, "flake8-bugbear": { "hashes": [ @@ -856,11 +693,11 @@ }, "identify": { "hashes": [ - "sha256:7c22c384a2c9b32c5cc891d13f923f6b2653aa83e2d75d8f79be240d6c86c4f4", - "sha256:da683bfb7669fa749fc7731f378229e2dbf29a1d1337cbde04106f02236eb29d" + "sha256:5dd84ac64a9a115b8e0b27d1756b244b882ad264c3c423f42af8235a6e71ca12", + "sha256:c9504ba6a043ee2db0a9d69e43246bc138034895f6338d5aed1b41e4a73b1513" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.5.5" + "version": "==1.5.9" }, "mccabe": { "hashes": [ @@ -886,11 +723,11 @@ }, "pre-commit": { "hashes": [ - "sha256:810aef2a2ba4f31eed1941fc270e72696a1ad5590b9751839c90807d0fff6b9a", - "sha256:c54fd3e574565fe128ecc5e7d2f91279772ddb03f8729645fa812fe809084a70" + "sha256:22e6aa3bd571debb01eb7d34483f11c01b65237be4eebbf30c3d4fb65762d315", + "sha256:905ebc9b534b991baec87e934431f2d0606ba27f2b90f7f652985f5a5b8b6ae6" ], "index": "pypi", 
- "version": "==2.7.1" + "version": "==2.8.2" }, "pycodestyle": { "hashes": [ @@ -950,10 +787,11 @@ }, "toml": { "hashes": [ - "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", - "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "version": "==0.10.1" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.10.2" }, "unittest-xml-reporting": { "hashes": [ @@ -965,11 +803,11 @@ }, "virtualenv": { "hashes": [ - "sha256:43add625c53c596d38f971a465553f6318decc39d98512bc100fa1b1e839c8dc", - "sha256:e0305af10299a7fb0d69393d8f04cb2965dda9351140d11ac8db4e5e3970451b" + "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2", + "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.0.31" + "version": "==20.1.0" } } } -- cgit v1.2.3 From 70ee01b8726921e8389abd4f69ffb0e2ceee0773 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 18:22:11 +0100 Subject: Generalise tag filter hint to accept all containers --- bot/exts/info/doc/_parsing.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 0883b9f42..93b6f0def 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -5,7 +5,7 @@ import re import string import textwrap from functools import partial -from typing import Callable, Collection, Iterable, List, Optional, TYPE_CHECKING, Tuple, Union +from typing import Callable, Collection, Container, Iterable, List, Optional, TYPE_CHECKING, Union from bs4 import BeautifulSoup from bs4.element import NavigableString, 
PageElement, Tag @@ -99,7 +99,7 @@ def _split_parameters(parameters_string: str) -> List[str]: def _find_elements_until_tag( start_element: PageElement, - end_tag_filter: Union[Tuple[str, ...], Callable[[Tag], bool]], + end_tag_filter: Union[Container[str], Callable[[Tag], bool]], *, func: Callable, include_strings: bool = False, @@ -108,7 +108,7 @@ def _find_elements_until_tag( """ Get all elements up to `limit` or until a tag matching `tag_filter` is found. - `end_tag_filter` can be either a tuple of string names to check against, + `end_tag_filter` can be either a container of string names to check against, or a filtering callable that's applied to tags. When `include_strings` is True, `NavigableString`s from the document will be included in the result along `Tag`s. @@ -116,12 +116,12 @@ def _find_elements_until_tag( `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`. The method is then iterated over and all elements until the matching tag or the limit are added to the return list. 
""" - use_tuple_filter = isinstance(end_tag_filter, tuple) + use_container_filter = not callable(end_tag_filter) elements = [] for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit): if isinstance(element, Tag): - if use_tuple_filter: + if use_container_filter: if element.name in end_tag_filter: break elif end_tag_filter(element): -- cgit v1.2.3 From beebeac45cf487e59ca4d76a84472c898bc23b06 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 19:20:44 +0100 Subject: Rename variables for clarity --- bot/exts/info/doc/_cog.py | 4 ++-- bot/exts/info/doc/_parsing.py | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 25477fe07..4e48e81e5 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -227,8 +227,8 @@ class DocCog(commands.Cog): symbol = f"{group_name}.{symbol}" self.renamed_symbols.add(symbol) - elif (overridden_symbol_group := original_symbol.group) in FORCE_PREFIX_GROUPS: - overridden_symbol = f"{overridden_symbol_group}.{symbol}" + elif (original_symbol_group := original_symbol.group) in FORCE_PREFIX_GROUPS: + overridden_symbol = f"{original_symbol_group}.{symbol}" if overridden_symbol in self.renamed_symbols: overridden_symbol = f"{api_package_name}.{overridden_symbol}" diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 93b6f0def..9140f635a 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -42,9 +42,9 @@ _NO_SIGNATURE_GROUPS = { "templatetag", "term", } -_EMBED_CODE_BLOCK_LENGTH = 61 +_EMBED_CODE_BLOCK_LINE_LENGTH = 61 # _MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight -_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LENGTH + 8) * _MAX_SIGNATURE_AMOUNT +_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * _MAX_SIGNATURE_AMOUNT # Maximum discord message length - 
signatures on top _MAX_DESCRIPTION_LENGTH = 2000 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace @@ -189,7 +189,7 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec if not sum(len(signature) for signature in signatures) > _MAX_SIGNATURES_LENGTH: return signatures - max_signature_length = _EMBED_CODE_BLOCK_LENGTH * (_MAX_SIGNATURE_AMOUNT + 1 - len(signatures)) + max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (_MAX_SIGNATURE_AMOUNT + 1 - len(signatures)) formatted_signatures = [] for signature in signatures: signature = signature.strip() @@ -221,12 +221,12 @@ def _get_truncated_description( max_length: int, ) -> str: """ - Truncate markdown from `elements` to be at most `max_length` characters visually. + Truncate markdown from `elements` to be at most `max_length` characters when rendered. `max_length` limits the length of the rendered characters in the string, with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits """ - visual_length = 0 + rendered_length = 0 real_length = 0 result = [] shortened = False @@ -234,7 +234,7 @@ def _get_truncated_description( for element in elements: is_tag = isinstance(element, Tag) element_length = len(element.text) if is_tag else len(element) - if visual_length + element_length < max_length: + if rendered_length + element_length < max_length: if is_tag: element_markdown = markdown_converter.process_tag(element) else: @@ -247,7 +247,7 @@ def _get_truncated_description( shortened = True break real_length += element_markdown_length - visual_length += element_length + rendered_length += element_length else: shortened = True break @@ -258,7 +258,7 @@ def _get_truncated_description( return markdown_string -def _parse_into_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str: +def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str: """ Create 
a markdown string with the signatures at the top, and the converted html description below them. @@ -309,4 +309,4 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: else: signature = _get_signatures(symbol_heading) description = _get_dd_description(symbol_heading) - return _parse_into_markdown(signature, description, symbol_data.url).replace('¶', '') + return _create_markdown(signature, description, symbol_data.url).replace('¶', '') -- cgit v1.2.3 From 7348b86bfedfc24c67d97a08d839a18956a6bff6 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 22:17:15 +0100 Subject: Update outdated docstring --- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 9140f635a..82b2ca808 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -263,7 +263,7 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] Create a markdown string with the signatures at the top, and the converted html description below them. The signatures are wrapped in python codeblocks, separated from the description by a newline. - The result string is truncated to be max 1000 symbols long. + The result markdown string is max 750 rendered characters for the description with signatures at the start. 
""" description = _get_truncated_description(description, DocMarkdownConverter(bullets="•", page_url=url), 750) description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) -- cgit v1.2.3 From ddb6b11575c05c8417f5607aec98fb1c09e351af Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 10 Nov 2020 22:22:27 +0100 Subject: Adjust unparseable symbol behaviour With redis we need to make sure we don't send the "error" string into the cache, returning None instead of the string and then setting it manually in the caller makes this nicer compared to checking against a string --- bot/exts/info/doc/_cog.py | 5 ++++- bot/exts/info/doc/_parsing.py | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 4e48e81e5..fa59bcc42 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -292,7 +292,10 @@ class DocCog(commands.Cog): if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") markdown = await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info) - await self.doc_cache.set(redis_key, markdown) + if markdown is not None: + await self.doc_cache.set(redis_key, markdown) + else: + markdown = "Unable to parse the requested symbol." embed = discord.Embed( title=discord.utils.escape_markdown(symbol), diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 82b2ca808..72e81982a 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -287,7 +287,7 @@ def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]: return match_tag -def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: +def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]: """ Return parsed markdown of the passed symbol using the passed in soup, truncated to 1000 characters. 
@@ -296,7 +296,7 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> str: symbol_heading = soup.find(id=symbol_data.symbol_id) if symbol_heading is None: log.warning("Symbol present in loaded inventories not found on site, consider refreshing inventories.") - return "Unable to parse the requested symbol." + return None signature = None # Modules, doc pages and labels don't point to description list tags but to tags like divs, # no special parsing can be done so we only try to include what's under them. -- cgit v1.2.3 From d936e5bc049e2e93beca3c62430d048d9f9cf47b Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 11 Nov 2020 18:23:01 +0100 Subject: Cancel scheduled inventory updates on all refreshes --- bot/exts/info/doc/_cog.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index fa59bcc42..822f682bf 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -250,6 +250,8 @@ class DocCog(commands.Cog): async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" log.debug("Refreshing documentation inventory...") + for inventory in self.scheduled_inventories: + self.inventory_scheduler.cancel(inventory) # Clear the old base URLS and doc symbols to ensure # that we start from a fresh local dataset. @@ -418,9 +420,6 @@ class DocCog(commands.Cog): """ await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') - if package_name in self.scheduled_inventories: - self.inventory_scheduler.cancel(package_name) - async with ctx.typing(): # Rebuild the inventory to ensure that everything # that was from this package is properly deleted. 
@@ -431,9 +430,6 @@ class DocCog(commands.Cog): @commands.has_any_role(*MODERATION_ROLES) async def refresh_command(self, ctx: commands.Context) -> None: """Refresh inventories and send differences to channel.""" - for inventory in self.scheduled_inventories: - self.inventory_scheduler.cancel(inventory) - old_inventories = set(self.base_urls) with ctx.typing(): await self.refresh_inventory() -- cgit v1.2.3 From 2bae8eeed0eae75d782da097e78826650e1ac498 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 12 Nov 2020 19:44:26 +0100 Subject: Intern relative url paths Group name interning was also moved to the DocItem creation to group the behaviour --- bot/exts/info/doc/_cog.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 822f682bf..ecc648d89 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -218,10 +218,8 @@ class DocCog(commands.Cog): for symbol, relative_doc_url in items: if "/" in symbol: continue # skip unreachable symbols with slashes - # Intern the group names since they're reused in all the DocItems - # to remove unnecessary memory consumption from them being unique objects - group_name = sys.intern(group.split(":")[1]) + group_name = group.split(":")[1] if (original_symbol := self.doc_symbols.get(symbol)) is not None: if group_name in FORCE_PREFIX_GROUPS: symbol = f"{group_name}.{symbol}" @@ -240,7 +238,14 @@ class DocCog(commands.Cog): self.renamed_symbols.add(symbol) relative_url_path, _, symbol_id = relative_doc_url.partition("#") - symbol_item = DocItem(api_package_name, group_name, base_url, relative_url_path, symbol_id) + # Intern fields that have shared content so we're not storing unique strings for every object + symbol_item = DocItem( + api_package_name, + sys.intern(group_name), + base_url, + sys.intern(relative_url_path), + symbol_id + ) self.doc_symbols[symbol] = symbol_item 
self.item_fetcher.add_item(symbol_item) -- cgit v1.2.3 From aeac77a08cdafadcc180a400c32ce21732d7d20d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 14 Nov 2020 02:39:07 +0100 Subject: Limit newlines in doc descriptions --- bot/exts/info/doc/_parsing.py | 48 ++++++++++++++++++++++++++++--------------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 72e81982a..418405ca9 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -10,6 +10,7 @@ from typing import Callable, Collection, Container, Iterable, List, Optional, TY from bs4 import BeautifulSoup from bs4.element import NavigableString, PageElement, Tag +from bot.utils.helpers import find_nth_occurrence from ._html import Strainer from ._markdown import DocMarkdownConverter if TYPE_CHECKING: @@ -219,21 +220,23 @@ def _get_truncated_description( elements: Iterable[Union[Tag, NavigableString]], markdown_converter: DocMarkdownConverter, max_length: int, + max_lines: int, ) -> str: """ - Truncate markdown from `elements` to be at most `max_length` characters when rendered. + Truncate markdown from `elements` to be at most `max_length` characters when rendered or `max_lines` newlines. 
`max_length` limits the length of the rendered characters in the string, with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits """ + result = "" + markdown_element_ends = [] rendered_length = 0 - real_length = 0 - result = [] - shortened = False + tag_end_index = 0 for element in elements: is_tag = isinstance(element, Tag) element_length = len(element.text) if is_tag else len(element) + if rendered_length + element_length < max_length: if is_tag: element_markdown = markdown_converter.process_tag(element) @@ -241,21 +244,29 @@ def _get_truncated_description( element_markdown = markdown_converter.process_text(element) element_markdown_length = len(element_markdown) - if real_length + element_markdown_length < _MAX_DESCRIPTION_LENGTH: - result.append(element_markdown) - else: - shortened = True - break - real_length += element_markdown_length rendered_length += element_length + tag_end_index += element_markdown_length + + if not element_markdown.isspace(): + markdown_element_ends.append(tag_end_index) + result += element_markdown else: - shortened = True break - markdown_string = "".join(result) - if shortened: - markdown_string = markdown_string.rstrip(_TRUNCATE_STRIP_CHARACTERS) + "..." - return markdown_string + if not markdown_element_ends: + return "" + + newline_truncate_index = find_nth_occurrence(result, "\n", max_lines) + if newline_truncate_index is not None and newline_truncate_index < _MAX_DESCRIPTION_LENGTH: + truncate_index = newline_truncate_index + else: + truncate_index = _MAX_DESCRIPTION_LENGTH + + if truncate_index >= markdown_element_ends[-1]: + return result + + markdown_truncate_index = max(cut for cut in markdown_element_ends if cut < truncate_index) + return result[:markdown_truncate_index].strip(_TRUNCATE_STRIP_CHARACTERS) + "..." 
def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str: @@ -265,7 +276,12 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] The signatures are wrapped in python codeblocks, separated from the description by a newline. The result markdown string is max 750 rendered characters for the description with signatures at the start. """ - description = _get_truncated_description(description, DocMarkdownConverter(bullets="•", page_url=url), 750) + description = _get_truncated_description( + description, + markdown_converter=DocMarkdownConverter(bullets="•", page_url=url), + max_length=750, + max_lines=13 + ) description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is not None: formatted_markdown = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures)) -- cgit v1.2.3 From b118f4cf38bdf99cf66e822c5b2280aff879123d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 14 Nov 2020 22:59:50 +0100 Subject: Rework the doc redis cache to work with hashes This rework requires us to delete packages caches easily with deleting the package hash instead of having to pattern match all keys and delete those. 
The interface was also updated to accept DocItems instead of requiring callers to construct the keys --- bot/exts/info/doc/_cog.py | 11 +++----- bot/exts/info/doc/_redis_cache.py | 57 +++++++++++++++++++++++++++++++++++---- 2 files changed, 56 insertions(+), 12 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ecc648d89..67a21ed72 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -4,7 +4,6 @@ import asyncio import logging import re import sys -import urllib.parse from collections import defaultdict from contextlib import suppress from typing import Dict, List, NamedTuple, Optional, Union @@ -175,6 +174,7 @@ class DocCog(commands.Cog): self.scheduled_inventories = set() self.bot.loop.create_task(self.init_refresh_inventory()) + self.bot.loop.create_task(self.doc_cache.delete_expired()) async def init_refresh_inventory(self) -> None: """Refresh documentation inventory on cog initialization.""" @@ -292,21 +292,18 @@ class DocCog(commands.Cog): return None self.bot.stats.incr(f"doc_fetches.{symbol_info.package.lower()}") - item_url = f"{symbol_info.url}#{symbol_info.symbol_id}" - redis_key = "".join(urllib.parse.urlparse(item_url)[1:]) # url without scheme - - markdown = await self.doc_cache.get(redis_key) + markdown = await self.doc_cache.get(symbol_info) if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") markdown = await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info) if markdown is not None: - await self.doc_cache.set(redis_key, markdown) + await self.doc_cache.set(symbol_info, markdown) else: markdown = "Unable to parse the requested symbol." embed = discord.Embed( title=discord.utils.escape_markdown(symbol), - url=item_url, + url=f"{symbol_info.url}#{symbol_info.symbol_id}", description=markdown ) # Show all symbols with the same name that were renamed in the footer. 
diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index 147394ba6..c617eba49 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -1,23 +1,70 @@ -from typing import Optional +from __future__ import annotations + +import datetime +import pickle +from typing import Optional, TYPE_CHECKING from async_rediscache.types.base import RedisObject, namespace_lock +if TYPE_CHECKING: + from ._cog import DocItem class DocRedisCache(RedisObject): """Interface for redis functionality needed by the Doc cog.""" @namespace_lock - async def set(self, key: str, value: str) -> None: + async def set(self, item: DocItem, value: str) -> None: """ Set markdown `value` for `key`. Keys expire after a week to keep data up to date. """ + expiry_timestamp = datetime.datetime.now().timestamp() + 7 * 24 * 60 * 60 with await self._get_pool_connection() as connection: - await connection.setex(f"{self.namespace}:{key}", 7*24*60*60, value) + await connection.hset( + f"{self.namespace}:{item.package}", + self.get_item_key(item), + pickle.dumps((value, expiry_timestamp)) + ) @namespace_lock - async def get(self, key: str) -> Optional[str]: + async def get(self, item: DocItem) -> Optional[str]: """Get markdown contents for `key`.""" with await self._get_pool_connection() as connection: - return await connection.get(f"{self.namespace}:{key}", encoding="utf8") + cached_value = await connection.hget(f"{self.namespace}:{item.package}", self.get_item_key(item)) + if cached_value is None: + return None + + value, expire = pickle.loads(cached_value) + if expire <= datetime.datetime.now().timestamp(): + await connection.hdel(f"{self.namespace}:{item.package}", self.get_item_key(item)) + return None + + return value + + @namespace_lock + async def delete(self, package: str) -> None: + """Remove all values for `package`.""" + with await self._get_pool_connection() as connection: + await connection.delete(f"{self.namespace}:{package}") + + 
@namespace_lock + async def delete_expired(self) -> None: + """Delete all expired keys.""" + current_timestamp = datetime.datetime.now().timestamp() + with await self._get_pool_connection() as connection: + async for package_key in connection.iscan(match=f"{self.namespace}*"): + expired_fields = [] + + for field, cached_value in (await connection.hgetall(package_key)).items(): + _, expire = pickle.loads(cached_value) + if expire <= current_timestamp: + expired_fields.append(field) + + if expired_fields: + await connection.hdel(package_key, *expired_fields) + + @staticmethod + def get_item_key(item: DocItem) -> str: + """Create redis key for `item`.""" + return item.relative_url_path + item.symbol_id -- cgit v1.2.3 From 07a5d5fc58a402f930505c7b29a7a275e743a84d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 14 Nov 2020 23:07:13 +0100 Subject: Update existing redis values when parsing pages If we're parsing a page for a symbol that's out of the cache and encounter a symbol that was already cached we can update that symbol to keep it up to date without additional requests --- bot/exts/info/doc/_cog.py | 14 ++++++++------ bot/exts/info/doc/_redis_cache.py | 17 +++++++++++++++++ 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 67a21ed72..678134f3c 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -36,6 +36,8 @@ FORCE_PREFIX_GROUPS = ( WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay +doc_cache = DocRedisCache(namespace="Docs") + class DocItem(NamedTuple): """Holds inventory symbol information.""" @@ -116,7 +118,9 @@ class CachedParser: while self._queue: item, soup = self._queue.pop() try: - self._results[item] = get_symbol_markdown(soup, item) + markdown = get_symbol_markdown(soup, item) + await doc_cache.set_if_exists(item, markdown) + self._results[item] = 
markdown except Exception: log.exception(f"Unexpected error when handling {item}") else: @@ -161,8 +165,6 @@ class CachedParser: class DocCog(commands.Cog): """A set of commands for querying & displaying documentation.""" - doc_cache = DocRedisCache() - def __init__(self, bot: Bot): self.base_urls = {} self.bot = bot @@ -174,7 +176,7 @@ class DocCog(commands.Cog): self.scheduled_inventories = set() self.bot.loop.create_task(self.init_refresh_inventory()) - self.bot.loop.create_task(self.doc_cache.delete_expired()) + self.bot.loop.create_task(doc_cache.delete_expired()) async def init_refresh_inventory(self) -> None: """Refresh documentation inventory on cog initialization.""" @@ -292,12 +294,12 @@ class DocCog(commands.Cog): return None self.bot.stats.incr(f"doc_fetches.{symbol_info.package.lower()}") - markdown = await self.doc_cache.get(symbol_info) + markdown = await doc_cache.get(symbol_info) if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") markdown = await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info) if markdown is not None: - await self.doc_cache.set(symbol_info, markdown) + await doc_cache.set(symbol_info, markdown) else: markdown = "Unable to parse the requested symbol." diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index c617eba49..2230884c9 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -27,6 +27,23 @@ class DocRedisCache(RedisObject): pickle.dumps((value, expiry_timestamp)) ) + @namespace_lock + async def set_if_exists(self, item: DocItem, value: str) -> None: + """ + Set markdown `value` for `key` if `key` exists. + + Keys expire after a week to keep data up to date. 
+ """ + expiry_timestamp = datetime.datetime.now().timestamp() + 7 * 24 * 60 * 60 + + with await self._get_pool_connection() as connection: + if await connection.hexists(f"{self.namespace}:{item.package}", self.get_item_key(item)): + await connection.hset( + f"{self.namespace}:{item.package}", + self.get_item_key(item), + pickle.dumps((value, expiry_timestamp)) + ) + @namespace_lock async def get(self, item: DocItem) -> Optional[str]: """Get markdown contents for `key`.""" -- cgit v1.2.3 From 15e73b7d4148ff16d2d408eaf201ebd5a6fd1251 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 14 Nov 2020 23:34:39 +0100 Subject: Add command for clearing the cache of packages We also clear the cache when removing a package --- bot/exts/info/doc/_cog.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 678134f3c..b2d015b89 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -428,6 +428,7 @@ class DocCog(commands.Cog): # Rebuild the inventory to ensure that everything # that was from this package is properly deleted. 
await self.refresh_inventory() + await doc_cache.delete(package_name) await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) @@ -450,3 +451,10 @@ class DocCog(commands.Cog): description=f"```diff\n{added}\n{removed}```" if added or removed else "" ) await ctx.send(embed=embed) + + @docs_group.command(name="cleardoccache") + @commands.has_any_role(*MODERATION_ROLES) + async def clear_cache_command(self, ctx: commands.Context, package_name: PackageName) -> None: + """Clear persistent redis cache for `package`.""" + await doc_cache.delete(package_name) + await ctx.send(f"Succesfully cleared cache for {package_name}") -- cgit v1.2.3 From 531ee4aad5432860afa784d0c067019662b3a0fe Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 15 Nov 2020 02:35:37 +0100 Subject: Ensure packages from PRIORITY_PACKAGES are directly accessible Some packages (currently only python) should be prioritised to others, the previous cleanup didn't account for other packages loading before it which resulted in duplicate symbols getting the python prefix and the original symbols linking to most probably undesired pages --- bot/exts/info/doc/_cog.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index b2d015b89..9e4bb54ea 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -33,6 +33,9 @@ FORCE_PREFIX_GROUPS = ( "pdbcommand", "term", ) +PRIORITY_PACKAGES = ( + "python", +) WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay @@ -235,6 +238,10 @@ class DocCog(commands.Cog): self.doc_symbols[overridden_symbol] = original_symbol self.renamed_symbols.add(overridden_symbol) + elif api_package_name in PRIORITY_PACKAGES: + self.doc_symbols[f"{original_symbol.package}.{symbol}"] = original_symbol + self.renamed_symbols.add(symbol) 
+ else: symbol = f"{api_package_name}.{symbol}" self.renamed_symbols.add(symbol) -- cgit v1.2.3 From 977cc0552bd71018d874246137f812df14bb4d31 Mon Sep 17 00:00:00 2001 From: Harbys Date: Tue, 24 Nov 2020 14:22:55 +0100 Subject: Added Stream and revokestream commands --- bot/constants.py | 28 +++++++++ bot/exts/moderation/stream.py | 138 ++++++++++++++++++++++++++++++++++++++++++ config-default.yml | 3 + 3 files changed, 169 insertions(+) create mode 100644 bot/exts/moderation/stream.py diff --git a/bot/constants.py b/bot/constants.py index 2126b2b37..744fbd512 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -466,6 +466,7 @@ class Roles(metaclass=YAMLGetter): unverified: int verified: int # This is the Developers role on PyDis, here named verified for readability reasons. voice_verified: int + video: int class Guild(metaclass=YAMLGetter): @@ -701,3 +702,30 @@ ERROR_REPLIES = [ "Noooooo!!", "I can't believe you've done this", ] + +# TIME_FORMATS defines aliases and multipliers for time formats +# key is a standard time unit name like second ,year, decade etc. +# mul is a multiplier where duration of said time unit * multiplier = time in seconds +# eg. 
1 day = 1 * multiplier seconds, so mul = 86400 +TIME_FORMATS = { + "second": { + "aliases": ("s", "sec", "seconds", "secs"), + "mul": 1 + }, + "minute": { + "aliases": ("m", "min", "mins", "minutes"), + "mul": 60 + }, + "hour": { + "aliases": ("h", "hr", "hrs", "hours"), + "mul": 3600 + }, + "day": { + "aliases": ("d", "days"), + "mul": 86400 + }, + "year": { + "aliases": ("yr", "yrs", "years"), + "mul": 31536000 + } +} diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py new file mode 100644 index 000000000..673a21b1b --- /dev/null +++ b/bot/exts/moderation/stream.py @@ -0,0 +1,138 @@ +from discord.ext import commands, tasks +import discord + +from bot.constants import Roles, STAFF_ROLES, Guild, TIME_FORMATS +from bot import Bot +import time +from async_rediscache import RedisCache + +# Constant error messages +NO_USER_SPECIFIED = "Please specify a user" +TIME_FORMAT_NOT_VALID = "Please specify a valid time format ex. 10h or 1day" +TIME_LESS_EQ_0 = "Duration can not be a 0 or lower" +USER_ALREADY_ALLOWED_TO_STREAM = "This user can already stream" +USER_ALREADY_NOT_ALLOWED_TO_STREAM = "This user already can't stream" + + +# FORMATS holds a combined list of all allowed time units +# made from TIME_FORMATS constant +FORMATS = [] +for key, entry in TIME_FORMATS.items(): + FORMATS.extend(entry["aliases"]) + FORMATS.append(key) + + +class Stream(commands.Cog): + """Stream class handles giving screen sharing permission with commands""" + + # Data cache storing userid to unix_time relation + # user id is used to get member who's streaming permission need to be revoked after some time + # unix_time is a time when user's streaming permission needs tp be revoked in unix time notation + user_cache = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self.remove_permissions.start() + self.guild_static = None + + @staticmethod + def _link_from_alias(time_format) -> (dict, str): + """Get TIME_FORMATS key and entry by time format or any of its 
aliases""" + for format_key, val in TIME_FORMATS.items(): + if format_key == time_format or time_format in val["aliases"]: + return TIME_FORMATS[format_key], format_key + + def _parse_time_to_seconds(self, duration, time_format) -> int: + """Get time in seconds from duration and time format""" + return duration * self._link_from_alias(time_format)[0]["mul"] + + @commands.command(aliases=("streaming", "share")) + @commands.has_any_role(*STAFF_ROLES) + async def stream( + self, + ctx: commands.Context, + user: discord.Member = None, + duration: int = 1, + time_format: str = "h", + *_ + ): + """ + stream handles stream command + argument user - required user mention, any errors should be handled by upper level handler + duration - int must be higher than 0 - defaults to 1 + time_format - str defining what time unit you want to use, must be any of FORMATS - defaults to h + + Command give user permission to stream and takes it away after provided duration + """ + # Check for required user argument + # if not provided send NO_USER_SPECIFIED message + if not user: + await ctx.send(NO_USER_SPECIFIED) + return + + # Time can't be negative lol + if duration <= 0: + await ctx.send(TIME_LESS_EQ_0) + return + + # Check if time_format argument is a valid time format + # eg. 
d, day etc are aliases for day time format + if time_format not in FORMATS: + await ctx.send(TIME_FORMAT_NOT_VALID) + return + + # Check if user already has streaming permission + already_allowed = any(Roles.video == role.id for role in user.roles) + if already_allowed: + await ctx.send(USER_ALREADY_ALLOWED_TO_STREAM) + return + + # Set user id - time in redis cache and add streaming permission role + await self.user_cache.set(user.id, time.time() + self._parse_time_to_seconds(duration, time_format)) + await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") + await ctx.send(f"{user.mention} can now stream for {duration} {self._link_from_alias(time_format)[1]}/s") + + @tasks.loop(seconds=30) + async def remove_permissions(self): + """ + background loop for removing streaming permission + """ + all_entries = await self.user_cache.items() + for user_id, delete_time in all_entries: + if time.time() > delete_time: + member = self.guild_static.fetch_memebr(user_id) + if member: + await member.remove_roles(discord.Object(Roles.video), reason="Temporary streaming access revoked") + await self.user_cache.pop(user_id) + + @remove_permissions.before_loop + async def await_ready(self): + """Wait for bot to be ready before starting remove_permissions loop + and get guild by id + """ + await self.bot.wait_until_ready() + self.guild_static = self.bot.get_guild(Guild.id) + + @commands.command(aliases=("unstream", )) + @commands.has_any_role(*STAFF_ROLES) + async def revokestream( + self, + ctx: commands.Context, + user: discord.Member = None + ): + """ + stream handles revokestream command + argument user - required user mention, any errors should be handled by upper level handler + + command removes streaming permission from a user + """ + not_allowed = not any(Roles.video == role.id for role in user.roles) + if not_allowed: + await user.remove_roles(discord.Object(Roles.video)) + else: + await ctx.send(USER_ALREADY_NOT_ALLOWED_TO_STREAM) + 
+ +def setup(bot: Bot) -> None: + """Loads the Stream cog.""" + bot.add_cog(Stream(bot)) diff --git a/config-default.yml b/config-default.yml index 89493c4de..700406f4e 100644 --- a/config-default.yml +++ b/config-default.yml @@ -251,6 +251,9 @@ guild: jammers: 737249140966162473 team_leaders: 737250302834638889 + # Streaming + video: 764245844798079016 + moderation_roles: - *OWNERS_ROLE - *ADMINS_ROLE -- cgit v1.2.3 From a3aec34b444d75292f17dadc308457490c395620 Mon Sep 17 00:00:00 2001 From: Harbys Date: Tue, 24 Nov 2020 15:03:39 +0100 Subject: import fix --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 673a21b1b..ceb291027 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -2,7 +2,7 @@ from discord.ext import commands, tasks import discord from bot.constants import Roles, STAFF_ROLES, Guild, TIME_FORMATS -from bot import Bot +from bot.bot import Bot import time from async_rediscache import RedisCache -- cgit v1.2.3 From 91f1962703902fffabbbd7b710373850763e3ed7 Mon Sep 17 00:00:00 2001 From: Harbys Date: Tue, 24 Nov 2020 15:05:50 +0100 Subject: Add additional year alias --- bot/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/constants.py b/bot/constants.py index 744fbd512..6c0ef913b 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -725,7 +725,7 @@ TIME_FORMATS = { "mul": 86400 }, "year": { - "aliases": ("yr", "yrs", "years"), + "aliases": ("yr", "yrs", "years", "y"), "mul": 31536000 } } -- cgit v1.2.3 From b18127c3df9b46a33648a0db376a3587c9fbe6be Mon Sep 17 00:00:00 2001 From: Harbys Date: Tue, 24 Nov 2020 15:22:01 +0100 Subject: Add 2 first unit tests for Stream cog --- tests/bot/exts/moderation/test_stream.py | 45 ++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 tests/bot/exts/moderation/test_stream.py diff --git 
a/tests/bot/exts/moderation/test_stream.py b/tests/bot/exts/moderation/test_stream.py new file mode 100644 index 000000000..872627fc1 --- /dev/null +++ b/tests/bot/exts/moderation/test_stream.py @@ -0,0 +1,45 @@ +import unittest +from bot.constants import TIME_FORMATS +from bot.exts.moderation.stream import Stream +from tests.helpers import MockContext, MockBot + + +class StreamCommandTest(unittest.IsolatedAsyncioTestCase): + + def setUp(self) -> None: + self.bot = MockBot() + self.cog = Stream(self.bot) + self.ctx = MockContext() + + def test_linking_time_format_from_alias_or_key(self): + FORMATS = [] + for key, entry in TIME_FORMATS.items(): + FORMATS.extend(entry["aliases"]) + FORMATS.append(key) + + test_cases = (("sec", "second"), + ("s", "second"), + ("seconds", "second"), + ("second", "second"), + ("secs", "second"), + ("min", "minute"), + ("m", "minute"), + ("minutes", "minute"), + ("hr", "hour"), + ("hrs", "hour"), + ("hours", "hour"), + ("d", "day"), + ("days", "day"), + ("yr", "year"), + ("yrs", "year"), + ("y", "year")) + + for case in test_cases: + linked = self.cog._link_from_alias(case[0])[1] + self.assertEqual(linked, case[1]) + + def test_parsing_duration_and_time_format_to_seconds(self): + test_cases = ((1, "minute", 60), (5, "second", 5), (2, "day", 172800)) + for case in test_cases: + time_in_seconds = self.cog._parse_time_to_seconds(case[0], case[1]) + self.assertEqual(time_in_seconds, case[2]) -- cgit v1.2.3 From 42c862b49923e30f66632902c86cfd168021b1e8 Mon Sep 17 00:00:00 2001 From: Harbys Date: Tue, 24 Nov 2020 17:55:25 +0100 Subject: Add more tests and some comments --- tests/bot/exts/moderation/test_stream.py | 46 +++++++++++++++++++++++++++++--- 1 file changed, 43 insertions(+), 3 deletions(-) diff --git a/tests/bot/exts/moderation/test_stream.py b/tests/bot/exts/moderation/test_stream.py index 872627fc1..7aa2fae26 100644 --- a/tests/bot/exts/moderation/test_stream.py +++ b/tests/bot/exts/moderation/test_stream.py @@ -1,7 +1,27 @@ +import 
asyncio import unittest -from bot.constants import TIME_FORMATS + +from async_rediscache import RedisSession + +from bot.constants import TIME_FORMATS, Roles from bot.exts.moderation.stream import Stream -from tests.helpers import MockContext, MockBot +from tests.helpers import MockBot, MockRole, MockMember + +redis_session = None +redis_loop = asyncio.get_event_loop() + + +def setUpModule(): # noqa: N802 + """Create and connect to the fakeredis session.""" + global redis_session + redis_session = RedisSession(use_fakeredis=True) + redis_loop.run_until_complete(redis_session.connect()) + + +def tearDownModule(): # noqa: N802 + """Close the fakeredis session.""" + if redis_session: + redis_loop.run_until_complete(redis_session.close()) class StreamCommandTest(unittest.IsolatedAsyncioTestCase): @@ -9,9 +29,13 @@ class StreamCommandTest(unittest.IsolatedAsyncioTestCase): def setUp(self) -> None: self.bot = MockBot() self.cog = Stream(self.bot) - self.ctx = MockContext() def test_linking_time_format_from_alias_or_key(self): + """ + User provided time format needs to be lined to a proper entry in TIME_FORMATS + This Test checks _link_from_alias method + Checking for whether alias or key exists in TIME_FORMATS is done before calling this function + """ FORMATS = [] for key, entry in TIME_FORMATS.items(): FORMATS.extend(entry["aliases"]) @@ -39,7 +63,23 @@ class StreamCommandTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(linked, case[1]) def test_parsing_duration_and_time_format_to_seconds(self): + """ + Test calculating time in seconds from duration and time unit + This test is technically dependent on _link_from_alias function, not the best practice but necessary + """ test_cases = ((1, "minute", 60), (5, "second", 5), (2, "day", 172800)) for case in test_cases: time_in_seconds = self.cog._parse_time_to_seconds(case[0], case[1]) self.assertEqual(time_in_seconds, case[2]) + + def test_checking_if_user_has_streaming_permission(self): + """ + Test searching for 
video role in Member.roles + """ + user1 = MockMember(roles=[MockRole(id=Roles.video)]) + user2 = MockMember() + already_allowed_user1 = any(Roles.video == role.id for role in user1.roles) + self.assertEqual(already_allowed_user1, True) + + already_allowed_user2 = any(Roles.video == role.id for role in user2.roles) + self.assertEqual(already_allowed_user2, False) -- cgit v1.2.3 From 25fe0c919edfffbca5a73554853d076455e2d997 Mon Sep 17 00:00:00 2001 From: Harbys Date: Tue, 24 Nov 2020 19:41:52 +0100 Subject: fixing code to be flake8 compliant --- bot/exts/moderation/stream.py | 43 +++++++++++++++----------------- tests/bot/exts/moderation/test_stream.py | 8 ++---- 2 files changed, 22 insertions(+), 29 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index ceb291027..a44095273 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,10 +1,11 @@ -from discord.ext import commands, tasks +import time + import discord +from async_rediscache import RedisCache +from discord.ext import commands, tasks -from bot.constants import Roles, STAFF_ROLES, Guild, TIME_FORMATS from bot.bot import Bot -import time -from async_rediscache import RedisCache +from bot.constants import Guild, Roles, STAFF_ROLES, TIME_FORMATS # Constant error messages NO_USER_SPECIFIED = "Please specify a user" @@ -23,7 +24,7 @@ for key, entry in TIME_FORMATS.items(): class Stream(commands.Cog): - """Stream class handles giving screen sharing permission with commands""" + """Stream class handles giving screen sharing permission with commands.""" # Data cache storing userid to unix_time relation # user id is used to get member who's streaming permission need to be revoked after some time @@ -36,14 +37,14 @@ class Stream(commands.Cog): self.guild_static = None @staticmethod - def _link_from_alias(time_format) -> (dict, str): - """Get TIME_FORMATS key and entry by time format or any of its aliases""" + def _link_from_alias(time_format: str) -> 
(dict, str): + """Get TIME_FORMATS key and entry by time format or any of its aliases.""" for format_key, val in TIME_FORMATS.items(): if format_key == time_format or time_format in val["aliases"]: return TIME_FORMATS[format_key], format_key - def _parse_time_to_seconds(self, duration, time_format) -> int: - """Get time in seconds from duration and time format""" + def _parse_time_to_seconds(self, duration: int, time_format: str) -> int: + """Get time in seconds from duration and time format.""" return duration * self._link_from_alias(time_format)[0]["mul"] @commands.command(aliases=("streaming", "share")) @@ -55,13 +56,13 @@ class Stream(commands.Cog): duration: int = 1, time_format: str = "h", *_ - ): + ) -> None: """ - stream handles stream command + Stream handles stream command. + argument user - required user mention, any errors should be handled by upper level handler duration - int must be higher than 0 - defaults to 1 time_format - str defining what time unit you want to use, must be any of FORMATS - defaults to h - Command give user permission to stream and takes it away after provided duration """ # Check for required user argument @@ -93,10 +94,8 @@ class Stream(commands.Cog): await ctx.send(f"{user.mention} can now stream for {duration} {self._link_from_alias(time_format)[1]}/s") @tasks.loop(seconds=30) - async def remove_permissions(self): - """ - background loop for removing streaming permission - """ + async def remove_permissions(self) -> None: + """Background loop for removing streaming permission.""" all_entries = await self.user_cache.items() for user_id, delete_time in all_entries: if time.time() > delete_time: @@ -106,10 +105,8 @@ class Stream(commands.Cog): await self.user_cache.pop(user_id) @remove_permissions.before_loop - async def await_ready(self): - """Wait for bot to be ready before starting remove_permissions loop - and get guild by id - """ + async def await_ready(self) -> None: + """Wait for bot to be ready before starting 
remove_permissions loop and get guild by id.""" await self.bot.wait_until_ready() self.guild_static = self.bot.get_guild(Guild.id) @@ -119,11 +116,11 @@ class Stream(commands.Cog): self, ctx: commands.Context, user: discord.Member = None - ): + ) -> None: """ - stream handles revokestream command - argument user - required user mention, any errors should be handled by upper level handler + Revokestream handles revokestream command. + argument user - required user mention, any errors should be handled by upper level handler command removes streaming permission from a user """ not_allowed = not any(Roles.video == role.id for role in user.roles) diff --git a/tests/bot/exts/moderation/test_stream.py b/tests/bot/exts/moderation/test_stream.py index 7aa2fae26..467c373aa 100644 --- a/tests/bot/exts/moderation/test_stream.py +++ b/tests/bot/exts/moderation/test_stream.py @@ -3,9 +3,9 @@ import unittest from async_rediscache import RedisSession -from bot.constants import TIME_FORMATS, Roles +from bot.constants import Roles from bot.exts.moderation.stream import Stream -from tests.helpers import MockBot, MockRole, MockMember +from tests.helpers import MockBot, MockMember, MockRole redis_session = None redis_loop = asyncio.get_event_loop() @@ -36,10 +36,6 @@ class StreamCommandTest(unittest.IsolatedAsyncioTestCase): This Test checks _link_from_alias method Checking for whether alias or key exists in TIME_FORMATS is done before calling this function """ - FORMATS = [] - for key, entry in TIME_FORMATS.items(): - FORMATS.extend(entry["aliases"]) - FORMATS.append(key) test_cases = (("sec", "second"), ("s", "second"), -- cgit v1.2.3 From 39401cd99b4dffc423a88f18a6e08c7cf1bd26e9 Mon Sep 17 00:00:00 2001 From: Harbys <44087388+Harbys@users.noreply.github.com> Date: Wed, 25 Nov 2020 17:15:53 +0100 Subject: removing redundant class names Co-authored-by: Mark --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index a44095273..92fd9955f 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -24,7 +24,7 @@ for key, entry in TIME_FORMATS.items(): class Stream(commands.Cog): - """Stream class handles giving screen sharing permission with commands.""" + """Grant and revoke streaming permissions from users.""" # Data cache storing userid to unix_time relation # user id is used to get member who's streaming permission need to be revoked after some time -- cgit v1.2.3 From 189f8c31bcf9f58cf72abf7f86061746613dfd7f Mon Sep 17 00:00:00 2001 From: Harbys <44087388+Harbys@users.noreply.github.com> Date: Wed, 25 Nov 2020 17:19:17 +0100 Subject: removing redundant descriptions Co-authored-by: Mark --- bot/exts/moderation/stream.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 92fd9955f..ef52cd107 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -117,12 +117,7 @@ class Stream(commands.Cog): ctx: commands.Context, user: discord.Member = None ) -> None: - """ - Revokestream handles revokestream command. 
- - argument user - required user mention, any errors should be handled by upper level handler - command removes streaming permission from a user - """ + """Revoke streaming permissions from a user.""" not_allowed = not any(Roles.video == role.id for role in user.roles) if not_allowed: await user.remove_roles(discord.Object(Roles.video)) -- cgit v1.2.3 From 2f97f5705cbc073c9460e3a60cde4d53d7f3d5e0 Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 25 Nov 2020 17:11:53 +0100 Subject: spelling fix from fetch_membr to fetch_member --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index ef52cd107..458559b18 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -99,7 +99,7 @@ class Stream(commands.Cog): all_entries = await self.user_cache.items() for user_id, delete_time in all_entries: if time.time() > delete_time: - member = self.guild_static.fetch_memebr(user_id) + member = self.guild_static.fetch_memeber(user_id) if member: await member.remove_roles(discord.Object(Roles.video), reason="Temporary streaming access revoked") await self.user_cache.pop(user_id) -- cgit v1.2.3 From f823f0e9a48f346a8a2ead7ded03da29104f064e Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 25 Nov 2020 17:14:16 +0100 Subject: removed share alias --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 458559b18..2ce248f03 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -47,7 +47,7 @@ class Stream(commands.Cog): """Get time in seconds from duration and time format.""" return duration * self._link_from_alias(time_format)[0]["mul"] - @commands.command(aliases=("streaming", "share")) + @commands.command(aliases=("streaming",)) @commands.has_any_role(*STAFF_ROLES) async def stream( self, -- cgit v1.2.3 From 
e4907d6e06f5ac9d94d7dcfe13dc7bb2c33cd65a Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 25 Nov 2020 17:18:05 +0100 Subject: fixing required arguments --- bot/exts/moderation/stream.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 2ce248f03..7678c3184 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -8,7 +8,6 @@ from bot.bot import Bot from bot.constants import Guild, Roles, STAFF_ROLES, TIME_FORMATS # Constant error messages -NO_USER_SPECIFIED = "Please specify a user" TIME_FORMAT_NOT_VALID = "Please specify a valid time format ex. 10h or 1day" TIME_LESS_EQ_0 = "Duration can not be a 0 or lower" USER_ALREADY_ALLOWED_TO_STREAM = "This user can already stream" @@ -52,7 +51,7 @@ class Stream(commands.Cog): async def stream( self, ctx: commands.Context, - user: discord.Member = None, + user: discord.Member, duration: int = 1, time_format: str = "h", *_ @@ -65,12 +64,6 @@ class Stream(commands.Cog): time_format - str defining what time unit you want to use, must be any of FORMATS - defaults to h Command give user permission to stream and takes it away after provided duration """ - # Check for required user argument - # if not provided send NO_USER_SPECIFIED message - if not user: - await ctx.send(NO_USER_SPECIFIED) - return - # Time can't be negative lol if duration <= 0: await ctx.send(TIME_LESS_EQ_0) -- cgit v1.2.3 From 9eb729d89e2969d284a546b539ab720e36007fab Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 25 Nov 2020 17:25:49 +0100 Subject: fixing punctuation and adding Emojis to messages --- bot/exts/moderation/stream.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 7678c3184..ca4284e77 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -5,13 +5,13 @@ from async_rediscache import RedisCache from 
discord.ext import commands, tasks from bot.bot import Bot -from bot.constants import Guild, Roles, STAFF_ROLES, TIME_FORMATS +from bot.constants import Guild, Roles, STAFF_ROLES, TIME_FORMATS, Emojis # Constant error messages -TIME_FORMAT_NOT_VALID = "Please specify a valid time format ex. 10h or 1day" -TIME_LESS_EQ_0 = "Duration can not be a 0 or lower" -USER_ALREADY_ALLOWED_TO_STREAM = "This user can already stream" -USER_ALREADY_NOT_ALLOWED_TO_STREAM = "This user already can't stream" +TIME_FORMAT_NOT_VALID = f"{Emojis.cross_mark}Please specify a valid time format ex. 10h or 1day." +TIME_LESS_EQ_0 = f"{Emojis.cross_mark}Duration can not be a 0 or lower." +USER_ALREADY_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user can already stream." +USER_ALREADY_NOT_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user already can't stream." # FORMATS holds a combined list of all allowed time units @@ -84,7 +84,8 @@ class Stream(commands.Cog): # Set user id - time in redis cache and add streaming permission role await self.user_cache.set(user.id, time.time() + self._parse_time_to_seconds(duration, time_format)) await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") - await ctx.send(f"{user.mention} can now stream for {duration} {self._link_from_alias(time_format)[1]}/s") + await ctx.send(f"{Emojis.check_mark}{user.mention} can now stream for " + f"{duration} {self._link_from_alias(time_format)[1]}/s.") @tasks.loop(seconds=30) async def remove_permissions(self) -> None: -- cgit v1.2.3 From bcbcd3e8b6bc95c96d7c316d032b9f774773e961 Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 25 Nov 2020 17:34:55 +0100 Subject: add success message after revokestream command --- bot/exts/moderation/stream.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index ca4284e77..7dd72a95b 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -115,6 +115,7 @@ class 
Stream(commands.Cog): not_allowed = not any(Roles.video == role.id for role in user.roles) if not_allowed: await user.remove_roles(discord.Object(Roles.video)) + await ctx.send(f"{Emojis.check_mark}Streaming permission taken from {user.display_name}") else: await ctx.send(USER_ALREADY_NOT_ALLOWED_TO_STREAM) -- cgit v1.2.3 From 16936aad19978078a872ce8ebec82f30a3e7442f Mon Sep 17 00:00:00 2001 From: Harbys Date: Fri, 27 Nov 2020 08:44:20 +0100 Subject: move to Scheduler --- Pipfile.lock | 68 +++++++++++--------- bot/constants.py | 27 -------- bot/exts/moderation/stream.py | 106 ++++++++++--------------------- tests/bot/exts/moderation/test_stream.py | 56 ---------------- 4 files changed, 72 insertions(+), 185 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 541db1627..25fcab4b1 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -187,6 +187,7 @@ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], + "index": "pypi", "markers": "sys_platform == 'win32'", "version": "==0.4.4" }, @@ -231,10 +232,10 @@ }, "fakeredis": { "hashes": [ - "sha256:8070b7fce16f828beaef2c757a4354af91698685d5232404f1aeeb233529c7a5", - "sha256:f8c8ea764d7b6fd801e7f5486e3edd32ca991d506186f1923a01fc072e33c271" + "sha256:01cb47d2286825a171fb49c0e445b1fa9307087e07cbb3d027ea10dbff108b6a", + "sha256:2c6041cf0225889bc403f3949838b2c53470a95a9e2d4272422937786f5f8f73" ], - "version": "==1.4.4" + "version": "==1.4.5" }, "feedparser": { "hashes": [ @@ -538,6 +539,15 @@ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, + "pyreadline": { + "hashes": [ + "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1", + "sha256:65540c21bfe14405a3a77e4c085ecfce88724743a4ead47c66b84defcf82c32e", + "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b" + ], + "markers": "sys_platform == 'win32'", + "version": "==2.1" + }, 
"python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", @@ -555,18 +565,18 @@ }, "pyyaml": { "hashes": [ - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", @@ -846,11 +856,11 @@ }, "flake8-bugbear": { "hashes": [ - "sha256:a3ddc03ec28ba2296fc6f89444d1c946a6b76460f859795b35b77d4920a51b63", - "sha256:bd02e4b009fb153fe6072c31c52aeab5b133d508095befb2ffcf3b41c4823162" + "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538", + 
"sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703" ], "index": "pypi", - "version": "==20.1.4" + "version": "==20.11.1" }, "flake8-docstrings": { "hashes": [ @@ -900,11 +910,11 @@ }, "identify": { "hashes": [ - "sha256:5dd84ac64a9a115b8e0b27d1756b244b882ad264c3c423f42af8235a6e71ca12", - "sha256:c9504ba6a043ee2db0a9d69e43246bc138034895f6338d5aed1b41e4a73b1513" + "sha256:943cd299ac7f5715fcb3f684e2fc1594c1e0f22a90d15398e5888143bd4144b5", + "sha256:cc86e6a9a390879dcc2976cef169dd9cc48843ed70b7380f321d1b118163c60e" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.5.9" + "version": "==1.5.10" }, "idna": { "hashes": [ @@ -938,11 +948,11 @@ }, "pre-commit": { "hashes": [ - "sha256:22e6aa3bd571debb01eb7d34483f11c01b65237be4eebbf30c3d4fb65762d315", - "sha256:905ebc9b534b991baec87e934431f2d0606ba27f2b90f7f652985f5a5b8b6ae6" + "sha256:4aee0db4808fa48d2458cedd5b9a084ef24dda1a0fa504432a11977a4d1cfd0a", + "sha256:b2d106d51c6ba6217e859d81774aae33fd825fe7de0dcf0c46e2586333d7a92e" ], "index": "pypi", - "version": "==2.8.2" + "version": "==2.9.0" }, "pycodestyle": { "hashes": [ @@ -970,18 +980,18 @@ }, "pyyaml": { "hashes": [ - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + 
"sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", @@ -1028,11 +1038,11 @@ }, "virtualenv": { "hashes": [ - "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2", - "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380" + "sha256:07cff122e9d343140366055f31be4dcd61fd598c69d11cd33a9d9c8df4546dd7", + "sha256:e0aac7525e880a429764cefd3aaaff54afb5d9f25c82627563603f5d7de5a6e5" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.1.0" + "version": "==20.2.1" } } } diff --git a/bot/constants.py b/bot/constants.py index 33ed29c39..dca83e7ab 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -705,30 +705,3 @@ ERROR_REPLIES = [ "Noooooo!!", "I can't believe you've done this", ] - -# TIME_FORMATS defines aliases and multipliers for time formats -# key is a standard time unit name like second ,year, decade etc. -# mul is a multiplier where duration of said time unit * multiplier = time in seconds -# eg. 
1 day = 1 * multiplier seconds, so mul = 86400 -TIME_FORMATS = { - "second": { - "aliases": ("s", "sec", "seconds", "secs"), - "mul": 1 - }, - "minute": { - "aliases": ("m", "min", "mins", "minutes"), - "mul": 60 - }, - "hour": { - "aliases": ("h", "hr", "hrs", "hours"), - "mul": 3600 - }, - "day": { - "aliases": ("d", "days"), - "mul": 86400 - }, - "year": { - "aliases": ("yr", "yrs", "years", "y"), - "mul": 31536000 - } -} diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 7dd72a95b..0fc004d75 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,11 +1,11 @@ -import time - import discord -from async_rediscache import RedisCache -from discord.ext import commands, tasks +from discord.ext import commands from bot.bot import Bot -from bot.constants import Guild, Roles, STAFF_ROLES, TIME_FORMATS, Emojis +from bot.constants import Emojis, Roles, STAFF_ROLES +from bot.converters import Expiry +from bot.utils.scheduling import Scheduler +from bot.utils.time import format_infraction_with_duration # Constant error messages TIME_FORMAT_NOT_VALID = f"{Emojis.cross_mark}Please specify a valid time format ex. 10h or 1day." @@ -14,37 +14,17 @@ USER_ALREADY_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user can already stre USER_ALREADY_NOT_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user already can't stream." 
-# FORMATS holds a combined list of all allowed time units -# made from TIME_FORMATS constant -FORMATS = [] -for key, entry in TIME_FORMATS.items(): - FORMATS.extend(entry["aliases"]) - FORMATS.append(key) - - class Stream(commands.Cog): """Grant and revoke streaming permissions from users.""" - # Data cache storing userid to unix_time relation - # user id is used to get member who's streaming permission need to be revoked after some time - # unix_time is a time when user's streaming permission needs tp be revoked in unix time notation - user_cache = RedisCache() - def __init__(self, bot: Bot): self.bot = bot - self.remove_permissions.start() - self.guild_static = None + self.scheduler = Scheduler(self.__class__.__name__) @staticmethod - def _link_from_alias(time_format: str) -> (dict, str): - """Get TIME_FORMATS key and entry by time format or any of its aliases.""" - for format_key, val in TIME_FORMATS.items(): - if format_key == time_format or time_format in val["aliases"]: - return TIME_FORMATS[format_key], format_key - - def _parse_time_to_seconds(self, duration: int, time_format: str) -> int: - """Get time in seconds from duration and time format.""" - return duration * self._link_from_alias(time_format)[0]["mul"] + async def _remove_streaming_permission(schedule_user: discord.Member) -> None: + """Remove streaming permission from Member""" + await schedule_user.remove_roles(discord.Object(Roles.video), reason="Temporary streaming access revoked") @commands.command(aliases=("streaming",)) @commands.has_any_role(*STAFF_ROLES) @@ -52,68 +32,48 @@ class Stream(commands.Cog): self, ctx: commands.Context, user: discord.Member, - duration: int = 1, - time_format: str = "h", + duration: Expiry, *_ ) -> None: """ - Stream handles stream command. 
- - argument user - required user mention, any errors should be handled by upper level handler - duration - int must be higher than 0 - defaults to 1 - time_format - str defining what time unit you want to use, must be any of FORMATS - defaults to h - Command give user permission to stream and takes it away after provided duration + Temporarily grant streaming permissions to a user for a given duration. + A unit of time should be appended to the duration. + Units (∗case-sensitive): + \u2003`y` - years + \u2003`m` - months∗ + \u2003`w` - weeks + \u2003`d` - days + \u2003`h` - hours + \u2003`M` - minutes∗ + \u2003`s` - seconds + Alternatively, an ISO 8601 timestamp can be provided for the duration. """ - # Time can't be negative lol - if duration <= 0: - await ctx.send(TIME_LESS_EQ_0) - return - - # Check if time_format argument is a valid time format - # eg. d, day etc are aliases for day time format - if time_format not in FORMATS: - await ctx.send(TIME_FORMAT_NOT_VALID) - return - # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: await ctx.send(USER_ALREADY_ALLOWED_TO_STREAM) return - # Set user id - time in redis cache and add streaming permission role - await self.user_cache.set(user.id, time.time() + self._parse_time_to_seconds(duration, time_format)) + # Schedule task to remove streaming permission from Member + self.scheduler.schedule_at(duration, user.id, self._remove_streaming_permission(user)) await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") - await ctx.send(f"{Emojis.check_mark}{user.mention} can now stream for " - f"{duration} {self._link_from_alias(time_format)[1]}/s.") - - @tasks.loop(seconds=30) - async def remove_permissions(self) -> None: - """Background loop for removing streaming permission.""" - all_entries = await self.user_cache.items() - for user_id, delete_time in all_entries: - if time.time() > delete_time: - 
member = self.guild_static.fetch_memeber(user_id) - if member: - await member.remove_roles(discord.Object(Roles.video), reason="Temporary streaming access revoked") - await self.user_cache.pop(user_id) - - @remove_permissions.before_loop - async def await_ready(self) -> None: - """Wait for bot to be ready before starting remove_permissions loop and get guild by id.""" - await self.bot.wait_until_ready() - self.guild_static = self.bot.get_guild(Guild.id) + await ctx.send(f"{Emojis.check_mark}{user.mention} can now stream until " + f"{format_infraction_with_duration(str(duration))}.") @commands.command(aliases=("unstream", )) @commands.has_any_role(*STAFF_ROLES) async def revokestream( self, ctx: commands.Context, - user: discord.Member = None + user: discord.Member ) -> None: - """Revoke streaming permissions from a user.""" - not_allowed = not any(Roles.video == role.id for role in user.roles) - if not_allowed: + """Take away streaming permission from a user""" + # Check if user has the streaming permission to begin with + allowed = any(Roles.video == role.id for role in user.roles) + if allowed: + # Cancel scheduled task to take away streaming permission to avoid errors + if user.id in self.scheduler: + self.scheduler.cancel(user.id) await user.remove_roles(discord.Object(Roles.video)) await ctx.send(f"{Emojis.check_mark}Streaming permission taken from {user.display_name}") else: diff --git a/tests/bot/exts/moderation/test_stream.py b/tests/bot/exts/moderation/test_stream.py index 467c373aa..15956a9de 100644 --- a/tests/bot/exts/moderation/test_stream.py +++ b/tests/bot/exts/moderation/test_stream.py @@ -1,28 +1,10 @@ -import asyncio import unittest -from async_rediscache import RedisSession from bot.constants import Roles from bot.exts.moderation.stream import Stream from tests.helpers import MockBot, MockMember, MockRole -redis_session = None -redis_loop = asyncio.get_event_loop() - - -def setUpModule(): # noqa: N802 - """Create and connect to the fakeredis 
session.""" - global redis_session - redis_session = RedisSession(use_fakeredis=True) - redis_loop.run_until_complete(redis_session.connect()) - - -def tearDownModule(): # noqa: N802 - """Close the fakeredis session.""" - if redis_session: - redis_loop.run_until_complete(redis_session.close()) - class StreamCommandTest(unittest.IsolatedAsyncioTestCase): @@ -30,44 +12,6 @@ class StreamCommandTest(unittest.IsolatedAsyncioTestCase): self.bot = MockBot() self.cog = Stream(self.bot) - def test_linking_time_format_from_alias_or_key(self): - """ - User provided time format needs to be lined to a proper entry in TIME_FORMATS - This Test checks _link_from_alias method - Checking for whether alias or key exists in TIME_FORMATS is done before calling this function - """ - - test_cases = (("sec", "second"), - ("s", "second"), - ("seconds", "second"), - ("second", "second"), - ("secs", "second"), - ("min", "minute"), - ("m", "minute"), - ("minutes", "minute"), - ("hr", "hour"), - ("hrs", "hour"), - ("hours", "hour"), - ("d", "day"), - ("days", "day"), - ("yr", "year"), - ("yrs", "year"), - ("y", "year")) - - for case in test_cases: - linked = self.cog._link_from_alias(case[0])[1] - self.assertEqual(linked, case[1]) - - def test_parsing_duration_and_time_format_to_seconds(self): - """ - Test calculating time in seconds from duration and time unit - This test is technically dependent on _link_from_alias function, not the best practice but necessary - """ - test_cases = ((1, "minute", 60), (5, "second", 5), (2, "day", 172800)) - for case in test_cases: - time_in_seconds = self.cog._parse_time_to_seconds(case[0], case[1]) - self.assertEqual(time_in_seconds, case[2]) - def test_checking_if_user_has_streaming_permission(self): """ Test searching for video role in Member.roles -- cgit v1.2.3 From fae4ad4a614c56f011f64c64b3318f511ccb17eb Mon Sep 17 00:00:00 2001 From: Harbys Date: Fri, 27 Nov 2020 09:10:48 +0100 Subject: fix flake8 and line endings --- bot/exts/moderation/stream.py | 5 
+++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 0fc004d75..d8c2a8628 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -23,7 +23,7 @@ class Stream(commands.Cog): @staticmethod async def _remove_streaming_permission(schedule_user: discord.Member) -> None: - """Remove streaming permission from Member""" + """Remove streaming permission from Member.""" await schedule_user.remove_roles(discord.Object(Roles.video), reason="Temporary streaming access revoked") @commands.command(aliases=("streaming",)) @@ -37,6 +37,7 @@ class Stream(commands.Cog): ) -> None: """ Temporarily grant streaming permissions to a user for a given duration. + A unit of time should be appended to the duration. Units (∗case-sensitive): \u2003`y` - years @@ -67,7 +68,7 @@ class Stream(commands.Cog): ctx: commands.Context, user: discord.Member ) -> None: - """Take away streaming permission from a user""" + """Take away streaming permission from a user.""" # Check if user has the streaming permission to begin with allowed = any(Roles.video == role.id for role in user.roles) if allowed: -- cgit v1.2.3 From 0d3d2bd632e2ed2e14eaacb7db9b49de4cd4baa5 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 29 Nov 2020 04:12:04 +0100 Subject: Use timedelta instead of constructing duration manually A newline was also added to set to keep it consistent with set_if_exists --- bot/exts/info/doc/_redis_cache.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index 2230884c9..e8577aa64 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -19,7 +19,8 @@ class DocRedisCache(RedisObject): Keys expire after a week to keep data up to date. 
""" - expiry_timestamp = datetime.datetime.now().timestamp() + 7 * 24 * 60 * 60 + expiry_timestamp = (datetime.datetime.now() + datetime.timedelta(weeks=1)).timestamp() + with await self._get_pool_connection() as connection: await connection.hset( f"{self.namespace}:{item.package}", @@ -34,7 +35,7 @@ class DocRedisCache(RedisObject): Keys expire after a week to keep data up to date. """ - expiry_timestamp = datetime.datetime.now().timestamp() + 7 * 24 * 60 * 60 + expiry_timestamp = (datetime.datetime.now() + datetime.timedelta(weeks=1)).timestamp() with await self._get_pool_connection() as connection: if await connection.hexists(f"{self.namespace}:{item.package}", self.get_item_key(item)): -- cgit v1.2.3 From e22deb55de286c4186da2f0d2f2d562b9e333630 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 29 Nov 2020 04:34:41 +0100 Subject: Use pop instead of getitem and del Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 9e4bb54ea..e29e3b717 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -139,9 +139,8 @@ class CachedParser: # The parse queue stores soups along with the doc symbols in QueueItem objects, # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. 
item_index = self._queue.index(item) - queue_item = self._queue[item_index] + queue_item = self._queue.pop(item_index) - del self._queue[item_index] self._queue.append(queue_item) def add_item(self, doc_item: DocItem) -> None: -- cgit v1.2.3 From ad90978fd7c038429b715f30519c01d546441afc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 29 Nov 2020 04:35:43 +0100 Subject: Clear up docstring so it doesn't rely on private attribute Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index e29e3b717..bd9b589ce 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -135,7 +135,7 @@ class CachedParser: log.trace("Finished parsing queue.") def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: - """Move `item` to the front of the parse queue.""" + """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" # The parse queue stores soups along with the doc symbols in QueueItem objects, # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. 
item_index = self._queue.index(item) -- cgit v1.2.3 From b094a6fa0dc9d9c2fde75cd79c95c87582f5e23d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 29 Nov 2020 04:44:17 +0100 Subject: Various grammar and sentence structure changes Co-authored-by: MarkKoz --- bot/converters.py | 2 +- bot/exts/info/doc/_cog.py | 19 ++++++++++--------- bot/exts/info/doc/_inventory_parser.py | 6 +++--- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index 3066eaabb..901ba1cca 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -140,7 +140,7 @@ class PackageName(Converter): async def convert(cls, ctx: Context, argument: str) -> str: """Checks whether the given string is a valid package name.""" if cls.PACKAGE_NAME_RE.search(argument): - raise BadArgument("The provided package name is not valid, please only use the _ and a-z characters.") + raise BadArgument("The provided package name is not valid; please only use the _ and a-z characters.") return argument diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index bd9b589ce..ea91b2353 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -207,7 +207,7 @@ class DocCog(commands.Cog): if not package: delay = 2*60 if inventory_url not in self.scheduled_inventories else 5*60 - log.info(f"Failed to fetch inventory, attempting again in {delay//60} minutes.") + log.info(f"Failed to fetch inventory; attempting again in {delay//60} minutes.") self.inventory_scheduler.schedule_later( delay, api_package_name, @@ -275,7 +275,7 @@ class DocCog(commands.Cog): self.scheduled_inventories.clear() await self.item_fetcher.clear() - # Run all coroutines concurrently - since each of them performs a HTTP + # Run all coroutines concurrently - since each of them performs an HTTP # request, this speeds up fetching the inventory data heavily. 
coros = [ self.update_single( @@ -322,7 +322,7 @@ class DocCog(commands.Cog): @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: - """Lookup documentation for Python symbols.""" + """Look up documentation for Python symbols.""" await ctx.invoke(self.get_command, symbol=symbol) @docs_group.command(name='getdoc', aliases=('g',)) @@ -414,7 +414,8 @@ class DocCog(commands.Cog): if await self.update_single(package_name, base_url, inventory_url) is None: await ctx.send( - f"Added package `{package_name}` to database but failed to fetch inventory; rescheduled in 2 minutes." + f"Added the package `{package_name}` to the database but failed to fetch inventory; " + f"trying again in 2 minutes." ) return await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") @@ -425,7 +426,7 @@ class DocCog(commands.Cog): """ Removes the specified package from the database. - Examples: + Example: !docs deletedoc aiohttp """ await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') @@ -435,12 +436,12 @@ class DocCog(commands.Cog): # that was from this package is properly deleted. 
await self.refresh_inventory() await doc_cache.delete(package_name) - await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") + await ctx.send(f"Successfully deleted `{package_name}` and refreshed the inventory.") @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) @commands.has_any_role(*MODERATION_ROLES) async def refresh_command(self, ctx: commands.Context) -> None: - """Refresh inventories and send differences to channel.""" + """Refresh inventories and show the difference.""" old_inventories = set(self.base_urls) with ctx.typing(): await self.refresh_inventory() @@ -461,6 +462,6 @@ class DocCog(commands.Cog): @docs_group.command(name="cleardoccache") @commands.has_any_role(*MODERATION_ROLES) async def clear_cache_command(self, ctx: commands.Context, package_name: PackageName) -> None: - """Clear persistent redis cache for `package`.""" + """Clear the persistent redis cache for `package`.""" await doc_cache.delete(package_name) - await ctx.send(f"Succesfully cleared cache for {package_name}") + await ctx.send(f"Successfully cleared the cache for `{package_name}`.") diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py index 23931869b..96df08786 100644 --- a/bot/exts/info/doc/_inventory_parser.py +++ b/bot/exts/info/doc/_inventory_parser.py @@ -101,17 +101,17 @@ async def fetch_inventory( inventory = await _fetch_inventory(client_session, url) except aiohttp.ClientConnectorError: log.warning( - f"Failed to connect to inventory url at {url}, " + f"Failed to connect to inventory url at {url}; " f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." ) except aiohttp.ClientError: log.error( - f"Failed to get inventory from {url}, " + f"Failed to get inventory from {url}; " f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." 
) except Exception: log.exception( - f"An unexpected error has occurred during fetching of {url}, " + f"An unexpected error has occurred during fetching of {url}; " f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." ) else: -- cgit v1.2.3 From 210f7d9b096b373935ab2a3f5f41989f4a081e35 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 29 Nov 2020 23:42:26 +0100 Subject: Remove redundant suppress --- bot/exts/info/doc/_cog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ea91b2353..7d57f65ad 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -215,8 +215,8 @@ class DocCog(commands.Cog): ) self.scheduled_inventories.add(api_package_name) return False - with suppress(KeyError): - self.scheduled_inventories.discard(api_package_name) + + self.scheduled_inventories.discard(api_package_name) for group, items in package.items(): for symbol, relative_doc_url in items: -- cgit v1.2.3 From 8b41a7678d175de69ae6bf72e6a9f6e7036e1968 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 8 Dec 2020 10:21:41 +0200 Subject: Add file path to codeblock --- bot/exts/info/code_snippets.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 1bb00b677..f807fa9a7 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -188,9 +188,16 @@ class CodeSnippets(Cog): if not is_valid_language: language = '' + # Adds a label showing the file path to the snippet + if start_line == end_line: + ret = f'`{file_path}` line {start_line}\n' + else: + ret = f'`{file_path}` lines {start_line} to {end_line}\n' + if len(required) != 0: - return f'```{language}\n{required}```\n' - return '' + return f'{ret}```{language}\n{required}```\n' + # Returns an empty codeblock if the snippet is empty + return f'{ret}``` ```\n' def __init__(self, bot: 
Bot): """Initializes the cog's bot.""" -- cgit v1.2.3 From e8d2448c771aef262b294a583661092c9e90baef Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 8 Dec 2020 10:36:56 +0200 Subject: Add logging for HTTP requests --- bot/exts/info/code_snippets.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index f807fa9a7..e1025e568 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -1,3 +1,4 @@ +import logging import re import textwrap from urllib.parse import quote_plus @@ -8,6 +9,7 @@ from discord.ext.commands import Cog from bot.bot import Bot from bot.utils.messages import wait_for_deletion +log = logging.getLogger(__name__) GITHUB_RE = re.compile( r'https://github\.com/(?P.+?)/blob/(?P.+/.+)' @@ -40,11 +42,14 @@ class CodeSnippets(Cog): async def _fetch_response(self, url: str, response_format: str, **kwargs) -> str: """Makes http requests using aiohttp.""" - async with self.bot.http_session.get(url, **kwargs) as response: - if response_format == 'text': - return await response.text() - elif response_format == 'json': - return await response.json() + try: + async with self.bot.http_session.get(url, **kwargs) as response: + if response_format == 'text': + return await response.text() + elif response_format == 'json': + return await response.json() + except Exception: + log.exception(f'Failed to fetch code snippet from {url}.') def _find_ref(self, path: str, refs: tuple) -> tuple: """Loops through all branches and tags to find the required ref.""" -- cgit v1.2.3 From 0e48ae679abc0937b4aad583b1b29ee0b3e3eb15 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 9 Dec 2020 13:40:01 +0100 Subject: Improve handling of strings Previously the code assumed ' and " can be used interchangeably, and strings that were inside of brackets were ignored for depth but their contents weren't causing strings like 
"ab[cd" to increase the depth --- bot/exts/info/doc/_parsing.py | 35 ++++++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 418405ca9..e6103dde2 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -56,6 +56,15 @@ _BRACKET_PAIRS = { } +def _is_closing_quote(search_string: str, index: int) -> bool: + """Check whether the quote at `index` inside `search_string` can be a closing quote.""" + if search_string[index - 1] != "\\": + return True + elif search_string[index - 2] == "\\": + return True + return False + + def _split_parameters(parameters_string: str) -> List[str]: """ Split parameters of a signature into individual parameter strings on commas. @@ -67,9 +76,11 @@ def _split_parameters(parameters_string: str) -> List[str]: depth = 0 expected_end = None current_search = None + quote_character = None - for index, character in enumerate(parameters_string): - if character in _BRACKET_PAIRS: + enumerated_string = enumerate(parameters_string) + for index, character in enumerated_string: + if quote_character is None and character in _BRACKET_PAIRS: if current_search is None: current_search = character expected_end = _BRACKET_PAIRS[character] @@ -77,12 +88,22 @@ def _split_parameters(parameters_string: str) -> List[str]: depth += 1 elif character in {"'", '"'}: - if depth == 0: + if current_search is not None: + # We're currently searching for a bracket, skip all characters that belong to the string + # to avoid false positives of closing brackets + quote_character = character + for index, character in enumerated_string: + if character == quote_character and _is_closing_quote(parameters_string, index): + break + + elif depth == 0: depth += 1 - elif parameters_string[index-1] != "\\": - depth -= 1 - elif parameters_string[index-2] == "\\": - depth -= 1 + quote_character = character + elif character == quote_character: + if 
_is_closing_quote(parameters_string, index): + depth -= 1 + if depth == 0: + quote_character = None elif character == expected_end: depth -= 1 -- cgit v1.2.3 From 04aa50bc3ac3baca788392fb6a56a4ba43e678d4 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 9 Dec 2020 15:25:53 +0100 Subject: Merge current_search and expected_end in The two variables were initialized and cleared together and contained related information --- bot/exts/info/doc/_parsing.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index e6103dde2..a8b38f400 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -4,6 +4,7 @@ import logging import re import string import textwrap +from collections import namedtuple from functools import partial from typing import Callable, Collection, Container, Iterable, List, Optional, TYPE_CHECKING, Union @@ -49,10 +50,12 @@ _MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * _MAX_SIGNATURE_AM # Maximum discord message length - signatures on top _MAX_DESCRIPTION_LENGTH = 2000 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." 
+ string.whitespace + +BracketPair = namedtuple("BracketPair", ["opening_bracket", "closing_bracket"]) _BRACKET_PAIRS = { - "{": "}", - "(": ")", - "[": "]", + "{": BracketPair("{", "}"), + "(": BracketPair("(", ")"), + "[": BracketPair("[", "]"), } @@ -74,17 +77,16 @@ def _split_parameters(parameters_string: str) -> List[str]: parameters_list = [] last_split = 0 depth = 0 - expected_end = None - current_search = None + current_search: Optional[BracketPair] = None quote_character = None enumerated_string = enumerate(parameters_string) for index, character in enumerated_string: if quote_character is None and character in _BRACKET_PAIRS: if current_search is None: - current_search = character - expected_end = _BRACKET_PAIRS[character] - if character == current_search: + current_search = _BRACKET_PAIRS[character] + depth = 1 + elif character == current_search.opening_bracket: depth += 1 elif character in {"'", '"'}: @@ -105,11 +107,10 @@ def _split_parameters(parameters_string: str) -> List[str]: if depth == 0: quote_character = None - elif character == expected_end: + elif current_search is not None and character == current_search.closing_bracket: depth -= 1 if depth == 0: current_search = None - expected_end = None elif depth == 0 and character == ",": parameters_list.append(parameters_string[last_split:index]) -- cgit v1.2.3 From 50cbfbda930aab5492411863aaaf8f8cd5ef57fd Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 9 Dec 2020 15:26:53 +0100 Subject: Create a generator instead of returning a list The result of _split_parameters is only iterated over, so a list is not needed. 
Making it lazy may also save some time in cases where we don't use all parameters --- bot/exts/info/doc/_parsing.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index a8b38f400..567786204 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -6,7 +6,7 @@ import string import textwrap from collections import namedtuple from functools import partial -from typing import Callable, Collection, Container, Iterable, List, Optional, TYPE_CHECKING, Union +from typing import Callable, Collection, Container, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union from bs4 import BeautifulSoup from bs4.element import NavigableString, PageElement, Tag @@ -68,13 +68,12 @@ def _is_closing_quote(search_string: str, index: int) -> bool: return False -def _split_parameters(parameters_string: str) -> List[str]: +def _split_parameters(parameters_string: str) -> Iterator[str]: """ Split parameters of a signature into individual parameter strings on commas. Long string literals are not accounted for. 
""" - parameters_list = [] last_split = 0 depth = 0 current_search: Optional[BracketPair] = None @@ -113,11 +112,10 @@ def _split_parameters(parameters_string: str) -> List[str]: current_search = None elif depth == 0 and character == ",": - parameters_list.append(parameters_string[last_split:index]) + yield parameters_string[last_split:index] last_split = index + 1 - parameters_list.append(parameters_string[last_split:]) - return parameters_list + yield parameters_string[last_split:] def _find_elements_until_tag( -- cgit v1.2.3 From ea9b3e0e9ac74ea541f436f8021178f76f19af39 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 11 Dec 2020 09:44:11 +0100 Subject: Restructure doc cache to handle caches of whole pages Previously we used packages as the top level keys and fields contained the url and the symbol id, however if we want to store all symbols from fetched pages instead of only the ones that were fetched by the users this comes worse off than using the page url in the field and setting EXPIREs for them instead of doing it manually in python. The new implementation uses package:url as the redis key and only the symbol id for field names, with the expire being set to a week on the key, this means we have to pattern match the keys when deleting the cache for a package but that's being done far less than the expire checking done previously. 
--- bot/exts/info/doc/_cog.py | 3 +- bot/exts/info/doc/_redis_cache.py | 95 +++++++++++++++------------------------ 2 files changed, 37 insertions(+), 61 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 7d57f65ad..d1518f69d 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -122,7 +122,7 @@ class CachedParser: item, soup = self._queue.pop() try: markdown = get_symbol_markdown(soup, item) - await doc_cache.set_if_exists(item, markdown) + await doc_cache.set(item, markdown) self._results[item] = markdown except Exception: log.exception(f"Unexpected error when handling {item}") @@ -178,7 +178,6 @@ class DocCog(commands.Cog): self.scheduled_inventories = set() self.bot.loop.create_task(self.init_refresh_inventory()) - self.bot.loop.create_task(doc_cache.delete_expired()) async def init_refresh_inventory(self) -> None: """Refresh documentation inventory on cog initialization.""" diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index e8577aa64..52cb2bc94 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -1,7 +1,6 @@ from __future__ import annotations import datetime -import pickle from typing import Optional, TYPE_CHECKING from async_rediscache.types.base import RedisObject, namespace_lock @@ -12,77 +11,55 @@ if TYPE_CHECKING: class DocRedisCache(RedisObject): """Interface for redis functionality needed by the Doc cog.""" + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._set_expires = set() + @namespace_lock async def set(self, item: DocItem, value: str) -> None: """ - Set markdown `value` for `key`. + Set the Markdown `value` for the symbol `item`. - Keys expire after a week to keep data up to date. + All keys from a single page are stored together, expiring a week after the first set. 
""" - expiry_timestamp = (datetime.datetime.now() + datetime.timedelta(weeks=1)).timestamp() + url_key = remove_suffix(item.relative_url_path, ".html") + redis_key = f"{self.namespace}:{item.package}:{url_key}" + needs_expire = False with await self._get_pool_connection() as connection: - await connection.hset( - f"{self.namespace}:{item.package}", - self.get_item_key(item), - pickle.dumps((value, expiry_timestamp)) - ) - - @namespace_lock - async def set_if_exists(self, item: DocItem, value: str) -> None: - """ - Set markdown `value` for `key` if `key` exists. + if item.package+url_key not in self._set_expires: + self._set_expires.add(item.package+url_key) + needs_expire = not await connection.exists(redis_key) - Keys expire after a week to keep data up to date. - """ - expiry_timestamp = (datetime.datetime.now() + datetime.timedelta(weeks=1)).timestamp() - - with await self._get_pool_connection() as connection: - if await connection.hexists(f"{self.namespace}:{item.package}", self.get_item_key(item)): - await connection.hset( - f"{self.namespace}:{item.package}", - self.get_item_key(item), - pickle.dumps((value, expiry_timestamp)) - ) + await connection.hset(redis_key, item.symbol_id, value) + if needs_expire: + await connection.expire(redis_key, datetime.timedelta(weeks=1).total_seconds()) @namespace_lock async def get(self, item: DocItem) -> Optional[str]: - """Get markdown contents for `key`.""" - with await self._get_pool_connection() as connection: - cached_value = await connection.hget(f"{self.namespace}:{item.package}", self.get_item_key(item)) - if cached_value is None: - return None - - value, expire = pickle.loads(cached_value) - if expire <= datetime.datetime.now().timestamp(): - await connection.hdel(f"{self.namespace}:{item.package}", self.get_item_key(item)) - return None + """Return the Markdown content of the symbol `item` if it exists.""" + url_key = remove_suffix(item.relative_url_path, ".html") - return value - - @namespace_lock - async def 
delete(self, package: str) -> None: - """Remove all values for `package`.""" with await self._get_pool_connection() as connection: - await connection.delete(f"{self.namespace}:{package}") + return await connection.hget(f"{self.namespace}:{item.package}:{url_key}", item.symbol_id, encoding="utf8") @namespace_lock - async def delete_expired(self) -> None: - """Delete all expired keys.""" - current_timestamp = datetime.datetime.now().timestamp() + async def delete(self, package: str) -> bool: + """Remove all values for `package`; return True if at least one key was deleted, False otherwise.""" with await self._get_pool_connection() as connection: - async for package_key in connection.iscan(match=f"{self.namespace}*"): - expired_fields = [] - - for field, cached_value in (await connection.hgetall(package_key)).items(): - _, expire = pickle.loads(cached_value) - if expire <= current_timestamp: - expired_fields.append(field) - - if expired_fields: - await connection.hdel(package_key, *expired_fields) - - @staticmethod - def get_item_key(item: DocItem) -> str: - """Create redis key for `item`.""" - return item.relative_url_path + item.symbol_id + package_keys = [ + package_key async for package_key in connection.iscan(match=f"{self.namespace}:{package}:*") + ] + if package_keys: + await connection.delete(*package_keys) + return True + return False + + +def remove_suffix(string: str, suffix: str) -> str: + """Remove `suffix` from end of `string`.""" + # TODO replace usages with str.removesuffix on 3.9 + if string.endswith(suffix): + return string[:-len(suffix)] + else: + return string -- cgit v1.2.3 From c42bf69a8b170772710c2184a3d0d3d57f597c30 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 11 Dec 2020 11:05:42 +0100 Subject: Use global bot http_session instead of parameter --- bot/converters.py | 2 +- bot/exts/info/doc/_cog.py | 12 ++++++------ bot/exts/info/doc/_inventory_parser.py | 13 ++++++------- 3 files changed, 13 
insertions(+), 14 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index d44b675a7..d558fa3df 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -190,7 +190,7 @@ class InventoryURL(Converter): async def convert(ctx: Context, url: str) -> str: """Convert url to Intersphinx inventory URL.""" await ctx.trigger_typing() - if await _inventory_parser.fetch_inventory(ctx.bot.http_session, url) is None: + if await _inventory_parser.fetch_inventory(url) is None: raise BadArgument(f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS}.") return url diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 524dcc829..e1be956cd 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -9,10 +9,10 @@ from contextlib import suppress from typing import Dict, List, NamedTuple, Optional, Union import discord -from aiohttp import ClientSession from bs4 import BeautifulSoup from discord.ext import commands +from bot import instance as bot_instance from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import InventoryURL, PackageName, ValidURL @@ -85,7 +85,7 @@ class CachedParser: self._item_events: Dict[DocItem, asyncio.Event] = {} self._parse_task = None - async def get_markdown(self, client_session: ClientSession, doc_item: DocItem) -> str: + async def get_markdown(self, doc_item: DocItem) -> str: """ Get result markdown of `doc_item`. @@ -96,7 +96,7 @@ class CachedParser: return symbol if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: - async with client_session.get(doc_item.url) as response: + async with bot_instance.http_session.get(doc_item.url) as response: soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) @@ -202,7 +202,7 @@ class DocCog(commands.Cog): Return True on success; False if fetching failed and was rescheduled. 
""" self.base_urls[api_package_name] = base_url - package = await fetch_inventory(self.bot.http_session, inventory_url) + package = await fetch_inventory(inventory_url) if not package: delay = 2*60 if inventory_url not in self.scheduled_inventories else 5*60 @@ -210,7 +210,7 @@ class DocCog(commands.Cog): self.inventory_scheduler.schedule_later( delay, api_package_name, - fetch_inventory(self.bot.http_session, inventory_url) + fetch_inventory(inventory_url) ) self.scheduled_inventories.add(api_package_name) return False @@ -302,7 +302,7 @@ class DocCog(commands.Cog): markdown = await doc_cache.get(symbol_info) if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") - markdown = await self.item_fetcher.get_markdown(self.bot.http_session, symbol_info) + markdown = await self.item_fetcher.get_markdown(symbol_info) if markdown is not None: await doc_cache.set(symbol_info, markdown) else: diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py index 96df08786..0d9bd726a 100644 --- a/bot/exts/info/doc/_inventory_parser.py +++ b/bot/exts/info/doc/_inventory_parser.py @@ -6,6 +6,8 @@ from typing import AsyncIterator, DefaultDict, List, Optional, Tuple import aiohttp +import bot + log = logging.getLogger(__name__) FAILED_REQUEST_ATTEMPTS = 3 @@ -69,10 +71,10 @@ async def _load_v2(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[ return invdata -async def _fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> DefaultDict[str, List[Tuple[str, str]]]: +async def _fetch_inventory(url: str) -> DefaultDict[str, List[Tuple[str, str]]]: """Fetch, parse and return an intersphinx inventory file from an url.""" timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5) - async with client_session.get(url, timeout=timeout, raise_for_status=True) as response: + async with bot.instance.http_session.get(url, timeout=timeout, raise_for_status=True) as response: stream = response.content inventory_header = 
(await stream.readline()).decode().rstrip() @@ -91,14 +93,11 @@ async def _fetch_inventory(client_session: aiohttp.ClientSession, url: str) -> D raise ValueError(f"Invalid inventory file at url {url}.") -async def fetch_inventory( - client_session: aiohttp.ClientSession, - url: str -) -> Optional[DefaultDict[str, List[Tuple[str, str]]]]: +async def fetch_inventory(url: str) -> Optional[DefaultDict[str, List[Tuple[str, str]]]]: """Get inventory from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors.""" for attempt in range(1, FAILED_REQUEST_ATTEMPTS+1): try: - inventory = await _fetch_inventory(client_session, url) + inventory = await _fetch_inventory(url) except aiohttp.ClientConnectorError: log.warning( f"Failed to connect to inventory url at {url}; " -- cgit v1.2.3 From fdff2491fc48bac0c55e0a506e7f7c395be13c0d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 11 Dec 2020 23:41:38 +0100 Subject: Remove internal CachedParser result cache We no longer need to keep the items around since everything is in redis and the costs of always going through redis is fairly small --- bot/exts/info/doc/_cog.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index e1be956cd..d2bbf8c57 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -80,9 +80,8 @@ class CachedParser: def __init__(self): self._queue: List[QueueItem] = [] - self._results = {} self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) - self._item_events: Dict[DocItem, asyncio.Event] = {} + self._item_futures: Dict[DocItem, asyncio.Future] = {} self._parse_task = None async def get_markdown(self, doc_item: DocItem) -> str: @@ -107,9 +106,8 @@ class CachedParser: self._parse_task = asyncio.create_task(self._parse_queue()) self._move_to_front(doc_item) - self._item_events[doc_item] = item_event = asyncio.Event() - await item_event.wait() - 
return self._results[doc_item] + self._item_futures[doc_item] = item_future = asyncio.Future() + return await item_future async def _parse_queue(self) -> None: """ @@ -123,12 +121,11 @@ class CachedParser: try: markdown = get_symbol_markdown(soup, item) await doc_cache.set(item, markdown) - self._results[item] = markdown except Exception: log.exception(f"Unexpected error when handling {item}") else: - if (event := self._item_events.get(item)) is not None: - event.set() + if (future := self._item_futures.get(item)) is not None: + future.set_result(markdown) await asyncio.sleep(0.1) self._parse_task = None @@ -153,15 +150,14 @@ class CachedParser: All currently requested items are waited to be parsed before clearing. """ - for event in self._item_events.values(): - await event.wait() + for future in self._item_futures.values(): + await future if self._parse_task is not None: self._parse_task.cancel() self._parse_task = None self._queue.clear() - self._results.clear() self._page_symbols.clear() - self._item_events.clear() + self._item_futures.clear() class DocCog(commands.Cog): -- cgit v1.2.3 From f6805c397c47d7dbfc2f38998c7de3556de69b42 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 11 Dec 2020 23:42:36 +0100 Subject: Ensure only one future is created for each doc_item Previously in case get_markdown for an item ran twice, the one that ran second would overwrite the future created by the first one, potentially causing the coro to wait for it infinitely as _parse_queue would only be able to set the last future --- bot/exts/info/doc/_cog.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index d2bbf8c57..78d9c6b9b 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -106,8 +106,9 @@ class CachedParser: self._parse_task = asyncio.create_task(self._parse_queue()) self._move_to_front(doc_item) - self._item_futures[doc_item] = 
item_future = asyncio.Future() - return await item_future + if doc_item not in self._item_futures: + self._item_futures[doc_item] = bot_instance.loop.create_future() + return await self._item_futures[doc_item] async def _parse_queue(self) -> None: """ -- cgit v1.2.3 From 121bdd16e8ee53d83822e9320232a65ea2ab540a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 11 Dec 2020 23:44:59 +0100 Subject: Move parse_queue cleanup into finally block The finally will make sure we reset the task and log it no matter what happens, additionally the clearing of the variable is now only done in one place as the finally also executes when the coro is cancelled --- bot/exts/info/doc/_cog.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 78d9c6b9b..603d7df97 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -117,20 +117,21 @@ class CachedParser: The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished. 
""" log.trace("Starting queue parsing.") - while self._queue: - item, soup = self._queue.pop() - try: - markdown = get_symbol_markdown(soup, item) - await doc_cache.set(item, markdown) - except Exception: - log.exception(f"Unexpected error when handling {item}") - else: - if (future := self._item_futures.get(item)) is not None: - future.set_result(markdown) - await asyncio.sleep(0.1) - - self._parse_task = None - log.trace("Finished parsing queue.") + try: + while self._queue: + item, soup = self._queue.pop() + try: + markdown = get_symbol_markdown(soup, item) + await doc_cache.set(item, markdown) + except Exception: + log.exception(f"Unexpected error when handling {item}") + else: + if (future := self._item_futures.get(item)) is not None: + future.set_result(markdown) + await asyncio.sleep(0.1) + finally: + self._parse_task = None + log.trace("Finished parsing queue.") def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" @@ -155,7 +156,6 @@ class CachedParser: await future if self._parse_task is not None: self._parse_task.cancel() - self._parse_task = None self._queue.clear() self._page_symbols.clear() self._item_futures.clear() -- cgit v1.2.3 From 97d0625823171a873393c8baf14212104b1ee955 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 11 Dec 2020 23:46:24 +0100 Subject: Provide feedback to user when no cache to clear was found While technically correct, always sending success could be misleading in case of a typo on the package --- bot/exts/info/doc/_cog.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 603d7df97..933f4500e 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -454,5 +454,7 @@ class DocCog(commands.Cog): @commands.has_any_role(*MODERATION_ROLES) async def clear_cache_command(self, ctx: 
commands.Context, package_name: PackageName) -> None: """Clear the persistent redis cache for `package`.""" - await doc_cache.delete(package_name) - await ctx.send(f"Successfully cleared the cache for `{package_name}`.") + if await doc_cache.delete(package_name): + await ctx.send(f"Successfully cleared the cache for `{package_name}`.") + else: + await ctx.send("No keys matching the package found.") -- cgit v1.2.3 From 30a3ce49fd346e4a2f4b3c9c12806a2aba8e9e16 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 02:37:03 +0100 Subject: Create function for merging function and decorator wrapper globals discord.py uses the globals of functions to resolve forward refs in commands, previously decorators applied before commands broke the bot with forwardrefs to names that weren't in the namespace of the module where they were defined, the new function takes care of merging the globals in a new function to mitigate this issue. closes: #1323 --- bot/decorators.py | 6 ++---- bot/utils/function.py | 27 +++++++++++++++++++++++++++ bot/utils/lock.py | 3 +-- 3 files changed, 30 insertions(+), 6 deletions(-) diff --git a/bot/decorators.py b/bot/decorators.py index 063c8f878..3892e350f 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -71,7 +71,6 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N This decorator must go before (below) the `command` decorator. 
""" def wrap(func: t.Callable) -> t.Callable: - @wraps(func) async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None: if ctx.channel.id == destination_channel: log.trace(f"Command {ctx.command.name} was invoked in destination_channel, not redirecting") @@ -106,7 +105,7 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N await ctx.message.delete() log.trace("Redirect output: Deleted invocation message") - return inner + return wraps(func)(function.update_wrapper_globals(inner, func)) return wrap @@ -123,7 +122,6 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: This decorator must go before (below) the `command` decorator. """ def decorator(func: t.Callable) -> t.Callable: - @wraps(func) async def wrapper(*args, **kwargs) -> None: log.trace(f"{func.__name__}: respect role hierarchy decorator called") @@ -151,5 +149,5 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: else: log.trace(f"{func.__name__}: {target.top_role=} < {actor.top_role=}; calling func") await func(*args, **kwargs) - return wrapper + return wraps(func)(function.update_wrapper_globals(wrapper, func)) return decorator diff --git a/bot/utils/function.py b/bot/utils/function.py index 3ab32fe3c..8b8c7ba5c 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -1,6 +1,7 @@ """Utilities for interaction with functions.""" import inspect +import types import typing as t Argument = t.Union[int, str] @@ -73,3 +74,29 @@ def get_bound_args(func: t.Callable, args: t.Tuple, kwargs: t.Dict[str, t.Any]) bound_args.apply_defaults() return bound_args.arguments + + +def update_wrapper_globals(wrapper: types.FunctionType, func: types.FunctionType) -> types.FunctionType: + """ + Update globals of `wrapper` with the globals from `func`. 
+ + For forwardrefs in command annotations discordpy uses the __global__ attribute of the function + to resolve their values, with decorators that replace the function this breaks because they have + their own globals. + + This function creates a new function functionally identical to `wrapper`, which has the globals replaced with + a merge of `func`s globals and the `wrapper`s globals. + + In case a global name from `func` conflicts with a name from `wrapper`'s globals, `wrapper` will win + to keep it functional, but this may cause problems if the name is used as an annotation and + discord.py uses it as a converter on a parameter from `func`. + """ + new_globals = wrapper.__globals__.copy() + new_globals.update((k, v) for k, v in func.__globals__.items() if k not in wrapper.__code__.co_names) + return types.FunctionType( + code=wrapper.__code__, + globals=new_globals, + name=wrapper.__name__, + argdefs=wrapper.__defaults__, + closure=wrapper.__closure__, + ) diff --git a/bot/utils/lock.py b/bot/utils/lock.py index 7aaafbc88..cf87321c5 100644 --- a/bot/utils/lock.py +++ b/bot/utils/lock.py @@ -61,7 +61,6 @@ def lock(namespace: Hashable, resource_id: ResourceId, *, raise_error: bool = Fa def decorator(func: Callable) -> Callable: name = func.__name__ - @wraps(func) async def wrapper(*args, **kwargs) -> Any: log.trace(f"{name}: mutually exclusive decorator called") @@ -93,7 +92,7 @@ def lock(namespace: Hashable, resource_id: ResourceId, *, raise_error: bool = Fa if raise_error: raise LockedResourceError(str(namespace), id_) - return wrapper + return wraps(func)(function.update_wrapper_globals(wrapper, func)) return decorator -- cgit v1.2.3 From 3cc32ae30a671a31a3f05c2c8a4af44e09095cc8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 02:37:37 +0100 Subject: Lock inventory refreshes All commands that refresh the inventories in some way are now locked to prevent various race conditions that may have occurred in 
the unlikely scenario that they got triggered together, the fetching part of the get command now also has to wait for the running inventory refresh to finish before proceeding to fetch and parse the html --- bot/exts/info/doc/_cog.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 933f4500e..11d17222d 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -17,6 +17,7 @@ from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import InventoryURL, PackageName, ValidURL from bot.pagination import LinePaginator +from bot.utils.lock import lock from bot.utils.messages import wait_for_deletion from bot.utils.scheduling import Scheduler from ._inventory_parser import fetch_inventory @@ -39,6 +40,10 @@ PRIORITY_PACKAGES = ( WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay +REFRESH_EVENT = asyncio.Event() +REFRESH_EVENT.set() +COMMAND_LOCK_SINGLETON = "inventory refresh" + doc_cache = DocRedisCache(namespace="Docs") @@ -91,9 +96,6 @@ class CachedParser: If no symbols were fetched from `doc_item`s page before, the HTML has to be fetched before parsing can be queued. 
""" - if (symbol := self._results.get(doc_item)) is not None: - return symbol - if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: async with bot_instance.http_session.get(doc_item.url) as response: soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") @@ -176,6 +178,7 @@ class DocCog(commands.Cog): self.bot.loop.create_task(self.init_refresh_inventory()) + @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def init_refresh_inventory(self) -> None: """Refresh documentation inventory on cog initialization.""" await self.bot.wait_until_guild_available() @@ -258,6 +261,7 @@ class DocCog(commands.Cog): async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" + REFRESH_EVENT.clear() log.debug("Refreshing documentation inventory...") for inventory in self.scheduled_inventories: self.inventory_scheduler.cancel(inventory) @@ -279,6 +283,7 @@ class DocCog(commands.Cog): ) for package in await self.bot.api_client.get('bot/documentation-links') ] await asyncio.gather(*coros) + REFRESH_EVENT.set() async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: """ @@ -299,6 +304,9 @@ class DocCog(commands.Cog): markdown = await doc_cache.get(symbol_info) if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") + if not REFRESH_EVENT.is_set(): + log.debug("Waiting for inventories to be refreshed before processing item.") + await REFRESH_EVENT.wait() markdown = await self.item_fetcher.get_markdown(symbol_info) if markdown is not None: await doc_cache.set(symbol_info, markdown) @@ -374,6 +382,7 @@ class DocCog(commands.Cog): @docs_group.command(name='setdoc', aliases=('s',)) @commands.has_any_role(*MODERATION_ROLES) + @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def set_command( self, ctx: commands.Context, package_name: PackageName, base_url: ValidURL, inventory_url: InventoryURL @@ -413,6 +422,7 @@ class DocCog(commands.Cog): 
@docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @commands.has_any_role(*MODERATION_ROLES) + @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None: """ Removes the specified package from the database. @@ -431,6 +441,7 @@ class DocCog(commands.Cog): @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) @commands.has_any_role(*MODERATION_ROLES) + @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def refresh_command(self, ctx: commands.Context) -> None: """Refresh inventories and show the difference.""" old_inventories = set(self.base_urls) -- cgit v1.2.3 From 9f11b453930b5abbab0b891e8b1ca0a2f9d013d0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 03:59:40 +0100 Subject: Simplify flow The else is a bit clearer than the early return --- bot/exts/info/doc/_cog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 11d17222d..5e7399afb 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -417,8 +417,8 @@ class DocCog(commands.Cog): f"Added the package `{package_name}` to the database but failed to fetch inventory; " f"trying again in 2 minutes." 
) - return - await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") + else: + await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @commands.has_any_role(*MODERATION_ROLES) -- cgit v1.2.3 From d21540d56853bc33625b0e1b8e2227294706eedb Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:02:49 +0100 Subject: Clear up grammar Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 5e7399afb..d828e6b4a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -76,10 +76,10 @@ class QueueItem(NamedTuple): class CachedParser: """ - Get symbol markdown from pages with smarter caching. + Get the symbol Markdown from pages with smarter caching. DocItems are added through the `add_item` method which adds them to the `_page_symbols` dict. - `get_markdown` is used to fetch the markdown; when this is used for the first time on a page, + `get_markdown` is used to fetch the Markdown; when this is used for the first time on a page, all of the symbols are queued to be parsed to avoid multiple web requests to the same page. """ @@ -91,7 +91,7 @@ class CachedParser: async def get_markdown(self, doc_item: DocItem) -> str: """ - Get result markdown of `doc_item`. + Get the result Markdown of `doc_item`. If no symbols were fetched from `doc_item`s page before, the HTML has to be fetched before parsing can be queued. @@ -418,7 +418,7 @@ class DocCog(commands.Cog): f"trying again in 2 minutes." 
) else: - await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") + await ctx.send(f"Added the package `{package_name}` to the database and refreshed the inventory.") @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @commands.has_any_role(*MODERATION_ROLES) -- cgit v1.2.3 From b3f9cc10b7fe50575fee74424ba26636007cbcdc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:16:19 +0100 Subject: Reuse form body to construct log message Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index d828e6b4a..61f770c0a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -407,9 +407,7 @@ class DocCog(commands.Cog): log.info( f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" - f"Package name: {package_name}\n" - f"Base url: {base_url}\n" - f"Inventory URL: {inventory_url}" + + "\n".join(f"{key}: {value}" for key, value in body.items()) ) if await self.update_single(package_name, base_url, inventory_url) is None: -- cgit v1.2.3 From 7aea86dd22572e9685ed8353428f14e90a9db321 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:22:20 +0100 Subject: Make reschedule delays a module constant --- bot/exts/info/doc/_cog.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 61f770c0a..30579894c 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -6,6 +6,7 @@ import re import sys from collections import defaultdict from contextlib import suppress +from types import SimpleNamespace from typing import Dict, List, NamedTuple, Optional, Union import discord @@ -39,6 +40,8 @@ PRIORITY_PACKAGES = ( ) WHITESPACE_AFTER_NEWLINES_RE = 
re.compile(r"(?<=\n\n)(\s+)") NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay +# Delay to wait before trying to reach a rescheduled inventory again, in minutes +FETCH_RESCHEDULE_DELAY = SimpleNamespace(first=2, repeated=5) REFRESH_EVENT = asyncio.Event() REFRESH_EVENT.set() @@ -197,7 +200,8 @@ class DocCog(commands.Cog): * `inventory_url` is the absolute URL to the intersphinx inventory. If the inventory file is currently unreachable, - the update is rescheduled to execute in 2 minutes on the first attempt, and 5 minutes on subsequent attempts. + the update is rescheduled to execute in FETCH_RESCHEDULE_DELAY.first minutes on the first attempt, + and FETCH_RESCHEDULE_DELAY.repeated minutes on the subsequent attempts. Return True on success; False if fetching failed and was rescheduled. """ @@ -205,7 +209,10 @@ class DocCog(commands.Cog): package = await fetch_inventory(inventory_url) if not package: - delay = 2*60 if inventory_url not in self.scheduled_inventories else 5*60 + if inventory_url not in self.scheduled_inventories: + delay = FETCH_RESCHEDULE_DELAY.first * 60 + else: + delay = FETCH_RESCHEDULE_DELAY.repeated * 60 log.info(f"Failed to fetch inventory; attempting again in {delay//60} minutes.") self.inventory_scheduler.schedule_later( delay, @@ -413,7 +420,7 @@ class DocCog(commands.Cog): if await self.update_single(package_name, base_url, inventory_url) is None: await ctx.send( f"Added the package `{package_name}` to the database but failed to fetch inventory; " - f"trying again in 2 minutes." + f"trying again in {FETCH_RESCHEDULE_DELAY.first} minutes." 
) else: await ctx.send(f"Added the package `{package_name}` to the database and refreshed the inventory.") -- cgit v1.2.3 From 73502611d1420a62f1e8c0a6ca51c02dc2c8f896 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:25:26 +0100 Subject: Call command method directly Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 30579894c..4cd28e29a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -334,7 +334,7 @@ class DocCog(commands.Cog): @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: """Look up documentation for Python symbols.""" - await ctx.invoke(self.get_command, symbol=symbol) + await self.get_command(ctx, symbol=symbol) @docs_group.command(name='getdoc', aliases=('g',)) async def get_command(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: -- cgit v1.2.3 From 677f2ad91dbc16ef3a33c102e4932d99a65437da Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:26:22 +0100 Subject: Change param styling to be consistent with the repo --- bot/exts/info/doc/_cog.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 4cd28e29a..60e86353b 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -187,9 +187,7 @@ class DocCog(commands.Cog): await self.bot.wait_until_guild_available() await self.refresh_inventory() - async def update_single( - self, api_package_name: str, base_url: str, inventory_url: str - ) -> bool: + async def update_single(self, api_package_name: str, base_url: str, inventory_url: str) -> bool: """ Rebuild the inventory for a single package. 
@@ -391,8 +389,11 @@ class DocCog(commands.Cog): @commands.has_any_role(*MODERATION_ROLES) @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def set_command( - self, ctx: commands.Context, package_name: PackageName, - base_url: ValidURL, inventory_url: InventoryURL + self, + ctx: commands.Context, + package_name: PackageName, + base_url: ValidURL, + inventory_url: InventoryURL, ) -> None: """ Adds a new documentation metadata object to the site's database. -- cgit v1.2.3 From f988d3ec07c4ca814fa5ddb47a6e064c4bb32461 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:27:29 +0100 Subject: Use string addition instead of join With only two strings, the addition is a bit clearer than constructing and joining a tuple Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 60e86353b..1b5eaa6d5 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -62,7 +62,7 @@ class DocItem(NamedTuple): @property def url(self) -> str: """Return the absolute url to the symbol.""" - return "".join((self.base_url, self.relative_url_path)) + return self.base_url + self.relative_url_path class QueueItem(NamedTuple): -- cgit v1.2.3 From 9cfdeacb807442c27de08e2b66c49d998dfae5ce Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:29:36 +0100 Subject: Move copyright outside of license text Co-authored-by: MarkKoz --- LICENSE-THIRD-PARTY | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY index d454070c2..ab715630d 100644 --- a/LICENSE-THIRD-PARTY +++ b/LICENSE-THIRD-PARTY @@ -37,12 +37,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
--------------------------------------------------------------------------------------------------- BSD 2-Clause License Applies to: - - bot/cogs/doc/inventory_parser.py: _load_v1, _load_v2 and ZlibStreamReader.__aiter__. + - Copyright (c) 2007-2020 by the Sphinx team (see AUTHORS file). All rights reserved. + - bot/cogs/doc/inventory_parser.py: _load_v1, _load_v2 and ZlibStreamReader.__aiter__. --------------------------------------------------------------------------------------------------- -Copyright (c) 2007-2020 by the Sphinx team (see AUTHORS file). -All rights reserved. - Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: -- cgit v1.2.3 From f416e42efce74082d155d9159114f698a97305cb Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:34:05 +0100 Subject: Return the sent message This allows the caller to work with the message further --- bot/utils/messages.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/utils/messages.py b/bot/utils/messages.py index 42bde358d..c42e4bacc 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -135,14 +135,14 @@ def sub_clyde(username: Optional[str]) -> Optional[str]: return username # Empty string or None -async def send_denial(ctx: Context, reason: str) -> None: +async def send_denial(ctx: Context, reason: str) -> discord.Message: """Send an embed denying the user with the given reason.""" embed = discord.Embed() embed.colour = discord.Colour.red() embed.title = random.choice(NEGATIVE_REPLIES) embed.description = reason - await ctx.send(embed=embed) + return await ctx.send(embed=embed) def format_user(user: discord.abc.User) -> str: -- cgit v1.2.3 From 9c6f3acac1334e885cc6b9d176a4b816bb68710a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:35:37 +0100 Subject: Use send_denial 
util instead of creating embed manually The symbol is also no longer sent back to the user, as it is not necessary and we can skip the cleanup on it --- bot/exts/info/doc/_cog.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 1b5eaa6d5..8c52b04cf 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -19,7 +19,7 @@ from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import InventoryURL, PackageName, ValidURL from bot.pagination import LinePaginator from bot.utils.lock import lock -from bot.utils.messages import wait_for_deletion +from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler from ._inventory_parser import fetch_inventory from ._parsing import get_symbol_markdown @@ -370,12 +370,7 @@ class DocCog(commands.Cog): doc_embed = await self.get_symbol_embed(symbol) if doc_embed is None: - symbol = await discord.ext.commands.clean_content().convert(ctx, symbol) - error_embed = discord.Embed( - description=f"Sorry, I could not find any documentation for `{(symbol)}`.", - colour=discord.Colour.red() - ) - error_message = await ctx.send(embed=error_embed) + error_message = await send_denial(ctx, "No documentation found for the requested symbol.") await wait_for_deletion(error_message, (ctx.author.id,), timeout=NOT_FOUND_DELETE_DELAY) with suppress(discord.NotFound): await ctx.message.delete() -- cgit v1.2.3 From 2a855de33c79bfebee4c85757d26b5463c1fccce Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:42:46 +0100 Subject: Use cancel_all instead of manually calling cancel repeatedly --- bot/exts/info/doc/_cog.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 8c52b04cf..07a287572 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py 
@@ -268,8 +268,7 @@ class DocCog(commands.Cog): """Refresh internal documentation inventory.""" REFRESH_EVENT.clear() log.debug("Refreshing documentation inventory...") - for inventory in self.scheduled_inventories: - self.inventory_scheduler.cancel(inventory) + self.inventory_scheduler.cancel_all() # Clear the old base URLS and doc symbols to ensure # that we start from a fresh local dataset. -- cgit v1.2.3 From fdc24cf48fcd34b14098befc36bb3d4ce768dccd Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 12 Dec 2020 04:44:37 +0100 Subject: Strip whitespace from symbol Markdown before returning it The html we parse frequently ends up with trailing and sometimes leading newlines which get stripped out by discord anyway, we have no reason to keep those around when sending the Markdown over to redis --- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 567786204..521034006 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -345,4 +345,4 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[s else: signature = _get_signatures(symbol_heading) description = _get_dd_description(symbol_heading) - return _create_markdown(signature, description, symbol_data.url).replace('¶', '') + return _create_markdown(signature, description, symbol_data.url).replace('¶', '').strip() -- cgit v1.2.3 From b827d9bc8b66b2b7cc3702056b473ebbaf601031 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 13 Dec 2020 05:48:27 +0100 Subject: Simplify the implementation of the custom strainer The strainer now forces the text attribute to be None, simplifying the check on strings and falls back to the superclass' method on non string elements --- bot/exts/info/doc/_html.py | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 
deletions(-) diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py index bc705130d..88fbc8825 100644 --- a/bot/exts/info/doc/_html.py +++ b/bot/exts/info/doc/_html.py @@ -1,7 +1,9 @@ -from collections.abc import Iterable +import logging from typing import List, Union -from bs4.element import NavigableString, PageElement, SoupStrainer, Tag +from bs4.element import PageElement, SoupStrainer + +log = logging.getLogger(__name__) class Strainer(SoupStrainer): @@ -9,25 +11,18 @@ class Strainer(SoupStrainer): def __init__(self, *, include_strings: bool, **kwargs): self.include_strings = include_strings + passed_text = kwargs.pop("text", None) + if passed_text is not None: + log.warning("`text` is not a supported kwarg in the custom strainer.") super().__init__(**kwargs) markup_hint = Union[PageElement, List["markup_hint"]] def search(self, markup: markup_hint) -> Union[PageElement, str]: """Extend default SoupStrainer behaviour to allow matching both `Tag`s` and `NavigableString`s.""" - if isinstance(markup, Iterable) and not isinstance(markup, (Tag, str)): - for element in markup: - if isinstance(element, NavigableString) and self.search(element): - return element - elif isinstance(markup, Tag): - # Also include tags while we're searching for strings and tags. - if self.include_strings or (not self.text or self.name or self.attrs): - return self.search_tag(markup) - - elif isinstance(markup, str): + if isinstance(markup, str): # Let everything through the text filter if we're including strings and tags. 
- text_filter = None if not self.include_strings else True - if not self.name and not self.attrs and self._matches(markup, text_filter): + if not self.name and not self.attrs and self.include_strings: return markup else: - raise Exception(f"I don't know how to match against a {markup.__class__}") + return super().search(markup) -- cgit v1.2.3 From 73d7d748a550e644980d2604542d279472eb1b0c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 14 Dec 2020 05:49:58 +0100 Subject: Run html parsing in an executor The parsing may take up to a few hundred ms depending on the amount of work it has to do --- bot/exts/info/doc/_cog.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 07a287572..093e5cdb7 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -6,6 +6,7 @@ import re import sys from collections import defaultdict from contextlib import suppress +from functools import partial from types import SimpleNamespace from typing import Dict, List, NamedTuple, Optional, Union @@ -126,7 +127,10 @@ class CachedParser: while self._queue: item, soup = self._queue.pop() try: - markdown = get_symbol_markdown(soup, item) + markdown = await bot_instance.loop.run_in_executor( + None, + partial(get_symbol_markdown, soup, item), + ) await doc_cache.set(item, markdown) except Exception: log.exception(f"Unexpected error when handling {item}") -- cgit v1.2.3 From a9dfeb195e53aba9b444959da8b16addea3574d2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 14 Dec 2020 05:50:45 +0100 Subject: Revert "Clear up docstring so it doesn't rely on private attribute" This reverts commit ad90978f --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 093e5cdb7..92190bc55 100644 --- a/bot/exts/info/doc/_cog.py +++ 
b/bot/exts/info/doc/_cog.py @@ -143,7 +143,7 @@ class CachedParser: log.trace("Finished parsing queue.") def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: - """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" + """Move `item` to the front of the parse queue.""" # The parse queue stores soups along with the doc symbols in QueueItem objects, # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. item_index = self._queue.index(item) -- cgit v1.2.3 From 2da9d443598bcf91c9eb6ab22963806a201fce01 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 14 Dec 2020 05:51:13 +0100 Subject: Clear up docstring so it doesn't rely on private attribute Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 92190bc55..6c51ab738 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -152,7 +152,7 @@ class CachedParser: self._queue.append(queue_item) def add_item(self, doc_item: DocItem) -> None: - """Add a DocItem to `_page_symbols`.""" + """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" self._page_symbols[doc_item.url].append(doc_item) async def clear(self) -> None: -- cgit v1.2.3 From cf00aff24d20a57c2c9178d6d9e30f5d33d9a426 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 15 Dec 2020 00:30:17 +0100 Subject: Create futures for all items in the queue Creating futures for everything and then awaiting at the end takes care of all the potential race conditions that may pop up from items that are parsed and sent to redis while the get_markdown method is in the middle of fetching a page. 
In case it happens with the implementation we'll just need to move the item to the front and the future will get a result set soon afterwards. --- bot/exts/info/doc/_cog.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 6c51ab738..0d344c363 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -7,6 +7,7 @@ import sys from collections import defaultdict from contextlib import suppress from functools import partial +from operator import attrgetter from types import SimpleNamespace from typing import Dict, List, NamedTuple, Optional, Union @@ -78,6 +79,14 @@ class QueueItem(NamedTuple): return NamedTuple.__eq__(self, other) +class ParseResultFuture(asyncio.Future): + """Future with the user_requested attribute to know which futures need to be waited for before clearing.""" + + def __init__(self): + super().__init__() + self.user_requested = False + + class CachedParser: """ Get the symbol Markdown from pages with smarter caching. @@ -90,7 +99,7 @@ class CachedParser: def __init__(self): self._queue: List[QueueItem] = [] self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) - self._item_futures: Dict[DocItem, asyncio.Future] = {} + self._item_futures: Dict[DocItem, ParseResultFuture] = {} self._parse_task = None async def get_markdown(self, doc_item: DocItem) -> str: @@ -99,21 +108,25 @@ class CachedParser: If no symbols were fetched from `doc_item`s page before, the HTML has to be fetched before parsing can be queued. + + Not safe to run while `self.clear` is running. 
""" if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: async with bot_instance.http_session.get(doc_item.url) as response: soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) + self._item_futures.update((symbol, ParseResultFuture()) for symbol in symbols_to_queue) del self._page_symbols[doc_item.url] log.debug(f"Added symbols from {doc_item.url} to parse queue.") if self._parse_task is None: self._parse_task = asyncio.create_task(self._parse_queue()) - self._move_to_front(doc_item) - if doc_item not in self._item_futures: - self._item_futures[doc_item] = bot_instance.loop.create_future() + with suppress(ValueError): + # If the item is not in the list then the item is already parsed or is being parsed + self._move_to_front(doc_item) + self._item_futures[doc_item].user_requested = True return await self._item_futures[doc_item] async def _parse_queue(self) -> None: @@ -161,7 +174,7 @@ class CachedParser: All currently requested items are waited to be parsed before clearing. """ - for future in self._item_futures.values(): + for future in filter(attrgetter("user_requested"), self._item_futures.values()): await future if self._parse_task is not None: self._parse_task.cancel() -- cgit v1.2.3 From a430f1aefdb092bc7ca2fd41bff20aedaa949f5e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 15 Dec 2020 00:35:12 +0100 Subject: Wait for the inventory to be refreshed before attempting any fetching Previously the bot returned an error if a symbol was not found while inventories were refreshing, but we can just wait for the to finish refreshing and then the symbol may be filled in. A logging call to notify of the refresh being done was also added. 
--- bot/exts/info/doc/_cog.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 0d344c363..a8642be3e 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -304,6 +304,7 @@ class DocCog(commands.Cog): ) for package in await self.bot.api_client.get('bot/documentation-links') ] await asyncio.gather(*coros) + log.debug("Finished inventory refresh.") REFRESH_EVENT.set() async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: @@ -316,6 +317,10 @@ class DocCog(commands.Cog): if not present also create a redis entry for the symbol. """ log.trace(f"Building embed for symbol `{symbol}`") + if not REFRESH_EVENT.is_set(): + log.debug("Waiting for inventories to be refreshed before processing item.") + await REFRESH_EVENT.wait() + symbol_info = self.doc_symbols.get(symbol) if symbol_info is None: log.debug("Symbol does not exist.") @@ -325,9 +330,6 @@ class DocCog(commands.Cog): markdown = await doc_cache.get(symbol_info) if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") - if not REFRESH_EVENT.is_set(): - log.debug("Waiting for inventories to be refreshed before processing item.") - await REFRESH_EVENT.wait() markdown = await self.item_fetcher.get_markdown(symbol_info) if markdown is not None: await doc_cache.set(symbol_info, markdown) -- cgit v1.2.3 From 7e5fb88a9976570590a4e946722fd60ada1aad95 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 15 Dec 2020 05:00:23 +0100 Subject: Return the fetched inventory in the Inventory converter Instead of fetching it again in the cog, the converter now returns the inventory for later use. 
The set command now no longer attempts to reschedule the inventory, and a bug that caused the inventory rescheduling to do nothing in `update_single` was fixed after moving it to its own method --- bot/converters.py | 12 +++--- bot/exts/info/doc/_cog.py | 75 ++++++++++++++++++---------------- bot/exts/info/doc/_inventory_parser.py | 9 ++-- 3 files changed, 50 insertions(+), 46 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index d558fa3df..6bbc22c3a 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -176,23 +176,23 @@ class ValidURL(Converter): return url -class InventoryURL(Converter): +class Inventory(Converter): """ Represents an Intersphinx inventory URL. This converter checks whether intersphinx accepts the given inventory URL, and raises - `BadArgument` if that is not the case. + `BadArgument` if that is not the case or if the url is unreachable. - Otherwise, it simply passes through the given URL. + Otherwise, it returns the url and the fetched inventory dict in a tuple. 
""" @staticmethod - async def convert(ctx: Context, url: str) -> str: + async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.INVENTORY_DICT]: """Convert url to Intersphinx inventory URL.""" await ctx.trigger_typing() - if await _inventory_parser.fetch_inventory(url) is None: + if (inventory := await _inventory_parser.fetch_inventory(url)) is None: raise BadArgument(f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS}.") - return url + return url, inventory class Snowflake(IDConverter): diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index a8642be3e..11d1dc9ad 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -18,12 +18,12 @@ from discord.ext import commands from bot import instance as bot_instance from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput -from bot.converters import InventoryURL, PackageName, ValidURL +from bot.converters import Inventory, PackageName, ValidURL from bot.pagination import LinePaginator from bot.utils.lock import lock from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler -from ._inventory_parser import fetch_inventory +from ._inventory_parser import INVENTORY_DICT, fetch_inventory from ._parsing import get_symbol_markdown from ._redis_cache import DocRedisCache @@ -204,7 +204,7 @@ class DocCog(commands.Cog): await self.bot.wait_until_guild_available() await self.refresh_inventory() - async def update_single(self, api_package_name: str, base_url: str, inventory_url: str) -> bool: + async def update_single(self, api_package_name: str, base_url: str, package: INVENTORY_DICT) -> None: """ Rebuild the inventory for a single package. 
@@ -213,31 +213,8 @@ class DocCog(commands.Cog): * `base_url` is the root documentation URL for the specified package, used to build absolute paths that link to specific symbols * `inventory_url` is the absolute URL to the intersphinx inventory. - - If the inventory file is currently unreachable, - the update is rescheduled to execute in FETCH_RESCHEDULE_DELAY.first minutes on the first attempt, - and FETCH_RESCHEDULE_DELAY.repeated minutes on the subsequent attempts. - - Return True on success; False if fetching failed and was rescheduled. """ self.base_urls[api_package_name] = base_url - package = await fetch_inventory(inventory_url) - - if not package: - if inventory_url not in self.scheduled_inventories: - delay = FETCH_RESCHEDULE_DELAY.first * 60 - else: - delay = FETCH_RESCHEDULE_DELAY.repeated * 60 - log.info(f"Failed to fetch inventory; attempting again in {delay//60} minutes.") - self.inventory_scheduler.schedule_later( - delay, - api_package_name, - fetch_inventory(inventory_url) - ) - self.scheduled_inventories.add(api_package_name) - return False - - self.scheduled_inventories.discard(api_package_name) for group, items in package.items(): for symbol, relative_doc_url in items: @@ -279,7 +256,37 @@ class DocCog(commands.Cog): self.item_fetcher.add_item(symbol_item) log.trace(f"Fetched inventory for {api_package_name}.") - return True + + async def update_or_reschedule_inventory( + self, + api_package_name: str, + base_url: str, + inventory_url: str + ) -> Optional[INVENTORY_DICT]: + """ + Update the cog's inventory, or reschedule this method to execute again if the remote inventory unreachable. + + The first attempt is rescheduled to execute in `FETCH_RESCHEDULE_DELAY.first` minutes, the subsequent attempts + in `FETCH_RESCHEDULE_DELAY.repeated` minutes. 
+ """ + package = await fetch_inventory(inventory_url) + + if not package: + if inventory_url not in self.scheduled_inventories: + delay = FETCH_RESCHEDULE_DELAY.first + else: + delay = FETCH_RESCHEDULE_DELAY.repeated + log.info(f"Failed to fetch inventory; attempting again in {delay} minutes.") + self.inventory_scheduler.schedule_later( + delay*60, + api_package_name, + self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url) + ) + self.scheduled_inventories.add(api_package_name) + return + + self.scheduled_inventories.discard(api_package_name) + await self.update_single(api_package_name, base_url, package) async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" @@ -299,7 +306,7 @@ class DocCog(commands.Cog): # Run all coroutines concurrently - since each of them performs an HTTP # request, this speeds up fetching the inventory data heavily. coros = [ - self.update_single( + self.update_or_reschedule_inventory( package["package"], package["base_url"], package["inventory_url"] ) for package in await self.bot.api_client.get('bot/documentation-links') ] @@ -406,7 +413,7 @@ class DocCog(commands.Cog): ctx: commands.Context, package_name: PackageName, base_url: ValidURL, - inventory_url: InventoryURL, + inventory: Inventory, ) -> None: """ Adds a new documentation metadata object to the site's database. @@ -419,6 +426,7 @@ class DocCog(commands.Cog): https://docs.python.org/3/ \ https://docs.python.org/3/objects.inv """ + inventory_url, inventory_dict = inventory body = { 'package': package_name, 'base_url': base_url, @@ -431,13 +439,8 @@ class DocCog(commands.Cog): + "\n".join(f"{key}: {value}" for key, value in body.items()) ) - if await self.update_single(package_name, base_url, inventory_url) is None: - await ctx.send( - f"Added the package `{package_name}` to the database but failed to fetch inventory; " - f"trying again in {FETCH_RESCHEDULE_DELAY.first} minutes." 
- ) - else: - await ctx.send(f"Added the package `{package_name}` to the database and refreshed the inventory.") + await self.update_single(package_name, base_url, inventory_dict) + await ctx.send(f"Added the package `{package_name}` to the database and refreshed the inventory.") @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @commands.has_any_role(*MODERATION_ROLES) diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py index 0d9bd726a..b38c3b2a8 100644 --- a/bot/exts/info/doc/_inventory_parser.py +++ b/bot/exts/info/doc/_inventory_parser.py @@ -11,6 +11,7 @@ import bot log = logging.getLogger(__name__) FAILED_REQUEST_ATTEMPTS = 3 +INVENTORY_DICT = DefaultDict[str, List[Tuple[str, str]]] _V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)') @@ -42,7 +43,7 @@ class ZlibStreamReader: pos = buf.find(b'\n') -async def _load_v1(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[str, str]]]: +async def _load_v1(stream: aiohttp.StreamReader) -> INVENTORY_DICT: invdata = defaultdict(list) async for line in stream: @@ -58,7 +59,7 @@ async def _load_v1(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[ return invdata -async def _load_v2(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[str, str]]]: +async def _load_v2(stream: aiohttp.StreamReader) -> INVENTORY_DICT: invdata = defaultdict(list) async for line in ZlibStreamReader(stream): @@ -71,7 +72,7 @@ async def _load_v2(stream: aiohttp.StreamReader) -> DefaultDict[str, List[Tuple[ return invdata -async def _fetch_inventory(url: str) -> DefaultDict[str, List[Tuple[str, str]]]: +async def _fetch_inventory(url: str) -> INVENTORY_DICT: """Fetch, parse and return an intersphinx inventory file from an url.""" timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5) async with bot.instance.http_session.get(url, timeout=timeout, raise_for_status=True) as response: @@ -93,7 +94,7 @@ async def 
_fetch_inventory(url: str) -> DefaultDict[str, List[Tuple[str, str]]]: raise ValueError(f"Invalid inventory file at url {url}.") -async def fetch_inventory(url: str) -> Optional[DefaultDict[str, List[Tuple[str, str]]]]: +async def fetch_inventory(url: str) -> Optional[INVENTORY_DICT]: """Get inventory from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors.""" for attempt in range(1, FAILED_REQUEST_ATTEMPTS+1): try: -- cgit v1.2.3 From 7134c10485d2b4215213c1ffb670fa9a06d5de1e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 18 Dec 2020 21:41:30 +0100 Subject: Use update_wrapper instead of wraps We're not using it as a decorator so using wraps only complicates the call syntax --- bot/decorators.py | 6 +++--- bot/utils/lock.py | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/bot/decorators.py b/bot/decorators.py index 3892e350f..a37996e80 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -2,7 +2,7 @@ import asyncio import logging import typing as t from contextlib import suppress -from functools import wraps +from functools import update_wrapper from discord import Member, NotFound from discord.ext import commands @@ -105,7 +105,7 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N await ctx.message.delete() log.trace("Redirect output: Deleted invocation message") - return wraps(func)(function.update_wrapper_globals(inner, func)) + return update_wrapper(function.update_wrapper_globals(inner, func), func) return wrap @@ -149,5 +149,5 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: else: log.trace(f"{func.__name__}: {target.top_role=} < {actor.top_role=}; calling func") await func(*args, **kwargs) - return wraps(func)(function.update_wrapper_globals(wrapper, func)) + return update_wrapper(function.update_wrapper_globals(wrapper, func), func) return decorator diff --git a/bot/utils/lock.py b/bot/utils/lock.py index 
cf87321c5..02188c827 100644 --- a/bot/utils/lock.py +++ b/bot/utils/lock.py @@ -1,7 +1,7 @@ import inspect import logging from collections import defaultdict -from functools import partial, wraps +from functools import partial, update_wrapper from typing import Any, Awaitable, Callable, Hashable, Union from weakref import WeakValueDictionary @@ -91,8 +91,7 @@ def lock(namespace: Hashable, resource_id: ResourceId, *, raise_error: bool = Fa log.info(f"{name}: aborted because resource {namespace!r}:{id_!r} is locked") if raise_error: raise LockedResourceError(str(namespace), id_) - - return wraps(func)(function.update_wrapper_globals(wrapper, func)) + return update_wrapper(function.update_wrapper_globals(wrapper, func), func) return decorator -- cgit v1.2.3 From 003613ff0f89871c8477e996c708873e1387e514 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 6 Jan 2021 06:56:17 +0100 Subject: Add comments to truncation handling code Co-authored-by: MarkKoz --- bot/exts/info/doc/_parsing.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 521034006..f51ab4ea1 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -276,15 +276,21 @@ def _get_truncated_description( if not markdown_element_ends: return "" + # Determine the "hard" truncation index. newline_truncate_index = find_nth_occurrence(result, "\n", max_lines) if newline_truncate_index is not None and newline_truncate_index < _MAX_DESCRIPTION_LENGTH: + # Truncate based on maximum lines if there are more than the maximum number of lines. truncate_index = newline_truncate_index else: + # There are less than the maximum number of lines; truncate based on the max char length. truncate_index = _MAX_DESCRIPTION_LENGTH + # Nothing needs to be truncated if the last element ends before the truncation index. 
if truncate_index >= markdown_element_ends[-1]: return result + # Determine the actual truncation index. + # Truncate at the last Markdown element that comes before the truncation index. markdown_truncate_index = max(cut for cut in markdown_element_ends if cut < truncate_index) return result[:markdown_truncate_index].strip(_TRUNCATE_STRIP_CHARACTERS) + "..." -- cgit v1.2.3 From fef6c50f0c8a9c54e6e0519c0feae5c8c32152c1 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 6 Jan 2021 06:57:54 +0100 Subject: Remove redundant variable Co-authored-by: MarkKoz --- bot/exts/info/doc/_parsing.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index f51ab4ea1..032fe3404 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -263,9 +263,8 @@ def _get_truncated_description( else: element_markdown = markdown_converter.process_text(element) - element_markdown_length = len(element_markdown) rendered_length += element_length - tag_end_index += element_markdown_length + tag_end_index += len(element_markdown) if not element_markdown.isspace(): markdown_element_ends.append(tag_end_index) -- cgit v1.2.3 From cbd84558ef4e5e89ce032c8b5d47f1bb94b89ba0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 6 Jan 2021 18:27:10 +0100 Subject: Do not attempt to set cache values for symbols that were not found --- bot/exts/info/doc/_cog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 11d1dc9ad..df5d417d7 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -144,7 +144,8 @@ class CachedParser: None, partial(get_symbol_markdown, soup, item), ) - await doc_cache.set(item, markdown) + if markdown is not None: + await doc_cache.set(item, markdown) except Exception: log.exception(f"Unexpected error when 
handling {item}") else: -- cgit v1.2.3 From 3439badedb65f7d37ba9733bc4e8268f2efe316e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 9 Jan 2021 06:37:31 +0100 Subject: Ensure no symbols get overwritten while generating symbol mappings The code handling this was moved to a function to achieve this cleanly. Includes fixes for bugs where incorrect package was added to the symbol name in the second branch and an incorrect symbol being added in the third branch Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 74 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 55 insertions(+), 19 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index df5d417d7..ed9432ed2 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -222,27 +222,19 @@ class DocCog(commands.Cog): if "/" in symbol: continue # skip unreachable symbols with slashes + # e.g. get 'class' from 'py:class' group_name = group.split(":")[1] - if (original_symbol := self.doc_symbols.get(symbol)) is not None: - if group_name in FORCE_PREFIX_GROUPS: - symbol = f"{group_name}.{symbol}" - self.renamed_symbols.add(symbol) - - elif (original_symbol_group := original_symbol.group) in FORCE_PREFIX_GROUPS: - overridden_symbol = f"{original_symbol_group}.{symbol}" - if overridden_symbol in self.renamed_symbols: - overridden_symbol = f"{api_package_name}.{overridden_symbol}" - - self.doc_symbols[overridden_symbol] = original_symbol - self.renamed_symbols.add(overridden_symbol) - - elif api_package_name in PRIORITY_PACKAGES: - self.doc_symbols[f"{original_symbol.package}.{symbol}"] = original_symbol - self.renamed_symbols.add(symbol) - + while (original_symbol := self.doc_symbols.get(symbol)) is not None: + replaced_symbol_name = self.ensure_unique_symbol_name( + api_package_name, + group_name, + original_symbol, + symbol, + ) + if replaced_symbol_name is None: + break else: - symbol = f"{api_package_name}.{symbol}" 
- self.renamed_symbols.add(symbol) + symbol = replaced_symbol_name relative_url_path, _, symbol_id = relative_doc_url.partition("#") # Intern fields that have shared content so we're not storing unique strings for every object @@ -289,6 +281,50 @@ class DocCog(commands.Cog): self.scheduled_inventories.discard(api_package_name) await self.update_single(api_package_name, base_url, package) + def ensure_unique_symbol_name( + self, + package_name: str, + group_name: str, + original_item: DocItem, + symbol_name: str + ) -> Optional[str]: + """ + Ensure `symbol_name` doesn't overwrite an another symbol in `doc_symbols`. + + Should only be called with symbol names that already have a conflict in `doc_symbols`. + + If None is returned, space was created for `symbol_name` in `doc_symbols` instead of + the symbol name being changed. + """ + # Certain groups are added as prefixes to disambiguate the symbols. + if group_name in FORCE_PREFIX_GROUPS: + self.renamed_symbols.add(symbol_name) + return f"{group_name}.{symbol_name}" + + # The existing symbol with which the current symbol conflicts should have a group prefix. + # It currently doesn't have the group prefix because it's only added once there's a conflict. + elif (original_symbol_group := original_item.group) in FORCE_PREFIX_GROUPS: + overridden_symbol = f"{original_symbol_group}.{symbol_name}" + if overridden_symbol in self.doc_symbols: + # If there's still a conflict, prefix with package name. + overridden_symbol = f"{original_item.package}.{overridden_symbol}" + + self.doc_symbols[overridden_symbol] = original_item + self.renamed_symbols.add(overridden_symbol) + + elif package_name in PRIORITY_PACKAGES: + overridden_symbol = f"{original_item.package}.{symbol_name}" + if overridden_symbol in self.doc_symbols: + # If there's still a conflict, add the symbol's group in the middle. 
+ overridden_symbol = f"{original_item.package}.{original_item.group}.{symbol_name}" + + self.doc_symbols[overridden_symbol] = original_item + self.renamed_symbols.add(overridden_symbol) + + else: + self.renamed_symbols.add(symbol_name) + return f"{package_name}.{symbol_name}" + async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" REFRESH_EVENT.clear() -- cgit v1.2.3 From fcfb604bc9123254622b763dba46d3f25ed4d93c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 9 Jan 2021 06:38:43 +0100 Subject: Do not ignore symbols with slashes. In some cases these are actual symbols that we can look up --- bot/exts/info/doc/_cog.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ed9432ed2..7aa6d0428 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -219,8 +219,6 @@ class DocCog(commands.Cog): for group, items in package.items(): for symbol, relative_doc_url in items: - if "/" in symbol: - continue # skip unreachable symbols with slashes # e.g. get 'class' from 'py:class' group_name = group.split(":")[1] -- cgit v1.2.3 From 33c861b4e1fb88c52585647a958ac27810399704 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 9 Jan 2021 19:27:21 +0100 Subject: Do not add package name to the front of the symbol if it's already there --- bot/exts/info/doc/_cog.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 7aa6d0428..feb08e1cb 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -319,9 +319,18 @@ class DocCog(commands.Cog): self.doc_symbols[overridden_symbol] = original_item self.renamed_symbols.add(overridden_symbol) + # If we can't specially handle the symbol through its group or package, + # fall back to prepending its package name to the front. 
else: - self.renamed_symbols.add(symbol_name) - return f"{package_name}.{symbol_name}" + if symbol_name.startswith(package_name): + # If the symbol already starts with the package name, insert the group name after it. + split_symbol_name = symbol_name.split(".", maxsplit=1) + split_symbol_name.insert(1, group_name) + overridden_symbol = ".".join(split_symbol_name) + else: + overridden_symbol = f"{package_name}.{symbol_name}" + self.renamed_symbols.add(overridden_symbol) + return overridden_symbol async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" -- cgit v1.2.3 From 70609baca94dc7c7ad7598f707ac479efe348e88 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 9 Jan 2021 21:48:51 +0100 Subject: Periodically clear unnecessary futures from the _item_futures dict The code has no way of reaching futures through new requests after their result has been set as that also includes setting its value in redis. --- bot/exts/info/doc/_cog.py | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index feb08e1cb..364d99182 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -4,6 +4,7 @@ import asyncio import logging import re import sys +import time from collections import defaultdict from contextlib import suppress from functools import partial @@ -80,11 +81,25 @@ class QueueItem(NamedTuple): class ParseResultFuture(asyncio.Future): - """Future with the user_requested attribute to know which futures need to be waited for before clearing.""" + """ + Future with metadata for the parser class. + + `user_requested` is set by the parser when a Future is requested by an user and moved to the front, + allowing the futures to only be waited for when clearing if they were user requested. 
+ + `result_set_time` provides the time at which the future's result has been set, + or -inf if the result hasn't been set yet + """ def __init__(self): super().__init__() self.user_requested = False + self.result_set_time = float("inf") + + def set_result(self, result: str, /) -> None: + """Set `self.result_set_time` to current time when the result is set.""" + self.result_set_time = time.time() + super().set_result(result) class CachedParser: @@ -102,6 +117,8 @@ class CachedParser: self._item_futures: Dict[DocItem, ParseResultFuture] = {} self._parse_task = None + self.cleanup_futures_task = bot_instance.loop.create_task(self._cleanup_futures()) + async def get_markdown(self, doc_item: DocItem) -> str: """ Get the result Markdown of `doc_item`. @@ -183,6 +200,21 @@ class CachedParser: self._page_symbols.clear() self._item_futures.clear() + async def _cleanup_futures(self) -> None: + """ + Clear old futures from internal results. + + After a future is set, we only need to wait for old requests to its associated DocItem to finish + as all new requests will get the value from the redis cache in the cog first. + Keeping them around for longer than a second is unnecessary and keeps the parsed Markdown strings alive. 
+ """ + while True: + current_time = time.time() + for key, future in self._item_futures.copy().items(): + if current_time - future.result_set_time > 5: + del self._item_futures[key] + await asyncio.sleep(5) + class DocCog(commands.Cog): """A set of commands for querying & displaying documentation.""" -- cgit v1.2.3 From 5ad2afbc0160a7d9b0ab9c50b73044e7169db7cb Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 9 Jan 2021 21:59:03 +0100 Subject: Stop scheduled and long running tasks on cog unload --- bot/exts/info/doc/_cog.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 364d99182..61ac35b6f 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -567,3 +567,9 @@ class DocCog(commands.Cog): await ctx.send(f"Successfully cleared the cache for `{package_name}`.") else: await ctx.send("No keys matching the package found.") + + def cog_unload(self) -> None: + """Clear scheduled inventories, queued symbols and cleanup task on cog unload.""" + self.inventory_scheduler.cancel_all() + self.item_fetcher.cleanup_futures_task.cancel() + asyncio.create_task(self.item_fetcher.clear()) -- cgit v1.2.3 From 50bb3439824277991124b888d0b46c5936c2efce Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 00:11:16 +0100 Subject: Handle equal DocItems in the queue This could be handled by using sets to hold the items in _page_symbols, but ultimately the check has a much smaller cost than having thousands of sets for the urls. 
Because we create futures for every item that ends up in the queue we can also skip the .get is None check and instead fetch the future directly from the dict --- bot/exts/info/doc/_cog.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 61ac35b6f..cee482c30 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -157,6 +157,11 @@ class CachedParser: while self._queue: item, soup = self._queue.pop() try: + if (future := self._item_futures[item]).done(): + # Some items are present in the inventories multiple times under different symbols, + # if we already parsed an equal item, we can just skip it. + continue + markdown = await bot_instance.loop.run_in_executor( None, partial(get_symbol_markdown, soup, item), @@ -166,8 +171,7 @@ class CachedParser: except Exception: log.exception(f"Unexpected error when handling {item}") else: - if (future := self._item_futures.get(item)) is not None: - future.set_result(markdown) + future.set_result(markdown) await asyncio.sleep(0.1) finally: self._parse_task = None -- cgit v1.2.3 From 298ad2f8e8f31d9f06a9e01a91a4d08f5b5d6347 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 01:48:26 +0100 Subject: Refresh inventories when the redis cache is cleared Because the futures are cleaned up and Markdown only exists in the cache after a short time, items that were requested previously and had the cache cleared would be missing from the CachedParser --- bot/exts/info/doc/_cog.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index cee482c30..a78916d4a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -565,9 +565,11 @@ class DocCog(commands.Cog): @docs_group.command(name="cleardoccache") @commands.has_any_role(*MODERATION_ROLES) + @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def 
clear_cache_command(self, ctx: commands.Context, package_name: PackageName) -> None: """Clear the persistent redis cache for `package`.""" if await doc_cache.delete(package_name): + await self.refresh_inventory() await ctx.send(f"Successfully cleared the cache for `{package_name}`.") else: await ctx.send("No keys matching the package found.") -- cgit v1.2.3 From 383e4e993c1bc9d31562748cc55ab4c468bcdd8d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 03:25:50 +0100 Subject: Set exception on future Without the exception set, to the user the bot would fail silently if an exception was handled here --- bot/exts/info/doc/_cog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index a78916d4a..3f7604072 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -168,8 +168,9 @@ class CachedParser: ) if markdown is not None: await doc_cache.set(item, markdown) - except Exception: + except Exception as e: log.exception(f"Unexpected error when handling {item}") + future.set_exception(e) else: future.set_result(markdown) await asyncio.sleep(0.1) -- cgit v1.2.3 From 5df60dd2ad10aec1c0368ed357562338e89a1250 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 03:32:54 +0100 Subject: Bump markdownify to 0.6.1-0.6.* The 0.6 release brought a new parameter that has to be included in all tag handling methods --- Pipfile | 2 +- bot/exts/info/doc/_markdown.py | 21 +++++++++++++-------- bot/exts/info/doc/_parsing.py | 2 +- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/Pipfile b/Pipfile index 4ca651c92..a92f64f59 100644 --- a/Pipfile +++ b/Pipfile @@ -18,7 +18,7 @@ deepdiff = "~=4.0" feedparser = "~=5.2" fuzzywuzzy = "~=0.17" lxml = "~=4.4" -markdownify = "~=0.4" +markdownify = "~=0.6.1" more_itertools = "~=8.2" python-dateutil = "~=2.8" pyyaml = "~=5.1" diff --git 
a/bot/exts/info/doc/_markdown.py b/bot/exts/info/doc/_markdown.py index ba35a84c4..1b7d8232b 100644 --- a/bot/exts/info/doc/_markdown.py +++ b/bot/exts/info/doc/_markdown.py @@ -11,7 +11,7 @@ class DocMarkdownConverter(MarkdownConverter): super().__init__(**options) self.page_url = page_url - def convert_li(self, el: PageElement, text: str) -> str: + def convert_li(self, el: PageElement, text: str, convert_as_inline: bool) -> str: """Fix markdownify's erroneous indexing in ol tags.""" parent = el.parent if parent is not None and parent.name == "ol": @@ -27,27 +27,32 @@ class DocMarkdownConverter(MarkdownConverter): bullet = bullets[depth % len(bullets)] return f"{bullet} {text}\n" - def convert_hn(self, _n: int, el: PageElement, text: str) -> str: + def convert_hn(self, _n: int, el: PageElement, text: str, convert_as_inline: bool) -> str: """Convert h tags to bold text with ** instead of adding #.""" + if convert_as_inline: + return text return f"**{text}**\n\n" - def convert_code(self, el: PageElement, text: str) -> str: + def convert_code(self, el: PageElement, text: str, convert_as_inline: bool) -> str: """Undo `markdownify`s underscore escaping.""" return f"`{text}`".replace("\\", "") - def convert_pre(self, el: PageElement, text: str) -> str: + def convert_pre(self, el: PageElement, text: str, convert_as_inline: bool) -> str: """Wrap any codeblocks in `py` for syntax highlighting.""" code = "".join(el.strings) return f"```py\n{code}```" - def convert_a(self, el: PageElement, text: str) -> str: + def convert_a(self, el: PageElement, text: str, convert_as_inline: bool) -> str: """Resolve relative URLs to `self.page_url`.""" el["href"] = urljoin(self.page_url, el["href"]) - return super().convert_a(el, text) + return super().convert_a(el, text, convert_as_inline) - def convert_p(self, el: PageElement, text: str) -> str: + def convert_p(self, el: PageElement, text: str, convert_as_inline: bool) -> str: """Include only one newline instead of two when the parent is 
a li tag.""" + if convert_as_inline: + return text + parent = el.parent if parent is not None and parent.name == "li": return f"{text}\n" - return super().convert_p(el, text) + return super().convert_p(el, text, convert_as_inline) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 032fe3404..46ae33b92 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -259,7 +259,7 @@ def _get_truncated_description( if rendered_length + element_length < max_length: if is_tag: - element_markdown = markdown_converter.process_tag(element) + element_markdown = markdown_converter.process_tag(element, convert_as_inline=False) else: element_markdown = markdown_converter.process_text(element) -- cgit v1.2.3 From 58154398d0ed905e0418451cfa7d3e8b66508bc6 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 03:39:06 +0100 Subject: Expand docstring --- bot/exts/info/doc/_inventory_parser.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py index b38c3b2a8..886708867 100644 --- a/bot/exts/info/doc/_inventory_parser.py +++ b/bot/exts/info/doc/_inventory_parser.py @@ -95,7 +95,12 @@ async def _fetch_inventory(url: str) -> INVENTORY_DICT: async def fetch_inventory(url: str) -> Optional[INVENTORY_DICT]: - """Get inventory from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors.""" + """ + Get an inventory dict from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors. 
+ + `url` should point at a valid sphinx objects.inv inventory file, which will be parsed into the + inventory dict in the format of {"domain:role": [("symbol_name", "relative_url_to_symbol"), ...], ...} + """ for attempt in range(1, FAILED_REQUEST_ATTEMPTS+1): try: inventory = await _fetch_inventory(url) -- cgit v1.2.3 From 695044167756eb2b6b4d953ef17f0359ba688246 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 03:58:43 +0100 Subject: Move functions strictly related to parsing html to the _html module Some constants need to be shared between html and parsing, because they may also be wanted to be edited by the cog user to change the behaviour, they were moved into the package's init. --- bot/exts/info/doc/__init__.py | 5 ++ bot/exts/info/doc/_cog.py | 4 +- bot/exts/info/doc/_html.py | 112 ++++++++++++++++++++++++++++++++++++- bot/exts/info/doc/_parsing.py | 125 ++++-------------------------------------- 4 files changed, 126 insertions(+), 120 deletions(-) diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py index e9eb9428c..af0bbff2d 100644 --- a/bot/exts/info/doc/__init__.py +++ b/bot/exts/info/doc/__init__.py @@ -1,6 +1,11 @@ from bot.bot import Bot from ._cog import DocCog +MAX_SIGNATURE_AMOUNT = 3 +PRIORITY_PACKAGES = ( + "python", +) + def setup(bot: Bot) -> None: """Load the Doc cog.""" diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 3f7604072..fd211d9f1 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -24,6 +24,7 @@ from bot.pagination import LinePaginator from bot.utils.lock import lock from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler +from . 
import PRIORITY_PACKAGES from ._inventory_parser import INVENTORY_DICT, fetch_inventory from ._parsing import get_symbol_markdown from ._redis_cache import DocRedisCache @@ -38,9 +39,6 @@ FORCE_PREFIX_GROUPS = ( "pdbcommand", "term", ) -PRIORITY_PACKAGES = ( - "python", -) WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay # Delay to wait before trying to reach a rescheduled inventory again, in minutes diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py index 88fbc8825..f9fe542ce 100644 --- a/bot/exts/info/doc/_html.py +++ b/bot/exts/info/doc/_html.py @@ -1,10 +1,27 @@ import logging -from typing import List, Union +import re +from functools import partial +from typing import Callable, Container, Iterable, List, Union -from bs4.element import PageElement, SoupStrainer +from bs4 import BeautifulSoup +from bs4.element import NavigableString, PageElement, SoupStrainer, Tag + +from . import MAX_SIGNATURE_AMOUNT log = logging.getLogger(__name__) +_UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") +_SEARCH_END_TAG_ATTRS = ( + "data", + "function", + "class", + "exception", + "seealso", + "section", + "rubric", + "sphinxsidebar", +) + class Strainer(SoupStrainer): """Subclass of SoupStrainer to allow matching of both `Tag`s and `NavigableString`s.""" @@ -26,3 +43,94 @@ class Strainer(SoupStrainer): return markup else: return super().search(markup) + + +def _find_elements_until_tag( + start_element: PageElement, + end_tag_filter: Union[Container[str], Callable[[Tag], bool]], + *, + func: Callable, + include_strings: bool = False, + limit: int = None, +) -> List[Union[Tag, NavigableString]]: + """ + Get all elements up to `limit` or until a tag matching `tag_filter` is found. + + `end_tag_filter` can be either a container of string names to check against, + or a filtering callable that's applied to tags. 
+ + When `include_strings` is True, `NavigableString`s from the document will be included in the result along `Tag`s. + + `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`. + The method is then iterated over and all elements until the matching tag or the limit are added to the return list. + """ + use_container_filter = not callable(end_tag_filter) + elements = [] + + for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit): + if isinstance(element, Tag): + if use_container_filter: + if element.name in end_tag_filter: + break + elif end_tag_filter(element): + break + elements.append(element) + + return elements + + +_find_next_children_until_tag = partial(_find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False)) +_find_recursive_children_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_all) +_find_next_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_next_siblings) +_find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) + + +def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]: + """Create callable that returns True when the passed in tag's class is in `class_names` or when it's is a table.""" + def match_tag(tag: Tag) -> bool: + for attr in class_names: + if attr in tag.get("class", ()): + return True + return tag.name == "table" + + return match_tag + + +def get_general_description(start_element: Tag) -> List[Union[Tag, NavigableString]]: + """ + Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. + + A headerlink a tag is attempted to be found to skip repeating the symbol information in the description, + if it's found it's used as the tag to start the search from instead of the `start_element`. 
+ """ + child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100) + header = next(filter(_class_filter_factory(["headerlink"]), child_tags), None) + start_tag = header.parent if header is not None else start_element + return _find_next_siblings_until_tag(start_tag, _class_filter_factory(_SEARCH_END_TAG_ATTRS), include_strings=True) + + +def get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString]]: + """Get the contents of the next dd tag, up to a dt or a dl tag.""" + description_tag = symbol.find_next("dd") + return _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True) + + +def get_signatures(start_signature: PageElement) -> List[str]: + """ + Collect up to `_MAX_SIGNATURE_AMOUNT` signatures from dt tags around the `start_signature` dt tag. + + First the signatures under the `start_signature` are included; + if less than 2 are found, tags above the start signature are added to the result if any are present. 
+ """ + signatures = [] + for element in ( + *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)), + start_signature, + *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2), + )[-MAX_SIGNATURE_AMOUNT:]: + signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) + + if signature: + signatures.append(signature) + + return signatures diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 46ae33b92..d68f7c8d7 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -5,37 +5,23 @@ import re import string import textwrap from collections import namedtuple -from functools import partial -from typing import Callable, Collection, Container, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union +from typing import Collection, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union from bs4 import BeautifulSoup -from bs4.element import NavigableString, PageElement, Tag +from bs4.element import NavigableString, Tag from bot.utils.helpers import find_nth_occurrence -from ._html import Strainer +from . 
import MAX_SIGNATURE_AMOUNT +from ._html import get_dd_description, get_general_description, get_signatures from ._markdown import DocMarkdownConverter if TYPE_CHECKING: from ._cog import DocItem log = logging.getLogger(__name__) -_MAX_SIGNATURE_AMOUNT = 3 - -_UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") _WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") _PARAMETERS_RE = re.compile(r"\((.+)\)") -_SEARCH_END_TAG_ATTRS = ( - "data", - "function", - "class", - "exception", - "seealso", - "section", - "rubric", - "sphinxsidebar", -) - _NO_SIGNATURE_GROUPS = { "attribute", "envvar", @@ -46,7 +32,7 @@ _NO_SIGNATURE_GROUPS = { } _EMBED_CODE_BLOCK_LINE_LENGTH = 61 # _MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight -_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * _MAX_SIGNATURE_AMOUNT +_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * MAX_SIGNATURE_AMOUNT # Maximum discord message length - signatures on top _MAX_DESCRIPTION_LENGTH = 2000 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace @@ -118,86 +104,6 @@ def _split_parameters(parameters_string: str) -> Iterator[str]: yield parameters_string[last_split:] -def _find_elements_until_tag( - start_element: PageElement, - end_tag_filter: Union[Container[str], Callable[[Tag], bool]], - *, - func: Callable, - include_strings: bool = False, - limit: int = None, -) -> List[Union[Tag, NavigableString]]: - """ - Get all elements up to `limit` or until a tag matching `tag_filter` is found. - - `end_tag_filter` can be either a container of string names to check against, - or a filtering callable that's applied to tags. - - When `include_strings` is True, `NavigableString`s from the document will be included in the result along `Tag`s. - - `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`. 
- The method is then iterated over and all elements until the matching tag or the limit are added to the return list. - """ - use_container_filter = not callable(end_tag_filter) - elements = [] - - for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit): - if isinstance(element, Tag): - if use_container_filter: - if element.name in end_tag_filter: - break - elif end_tag_filter(element): - break - elements.append(element) - - return elements - - -_find_next_children_until_tag = partial(_find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False)) -_find_recursive_children_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_all) -_find_next_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_next_siblings) -_find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings) - - -def _get_general_description(start_element: Tag) -> List[Union[Tag, NavigableString]]: - """ - Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. - - A headerlink a tag is attempted to be found to skip repeating the symbol information in the description, - if it's found it's used as the tag to start the search from instead of the `start_element`. 
- """ - child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100) - header = next(filter(_class_filter_factory(["headerlink"]), child_tags), None) - start_tag = header.parent if header is not None else start_element - return _find_next_siblings_until_tag(start_tag, _class_filter_factory(_SEARCH_END_TAG_ATTRS), include_strings=True) - - -def _get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString]]: - """Get the contents of the next dd tag, up to a dt or a dl tag.""" - description_tag = symbol.find_next("dd") - return _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True) - - -def _get_signatures(start_signature: PageElement) -> List[str]: - """ - Collect up to `_MAX_SIGNATURE_AMOUNT` signatures from dt tags around the `start_signature` dt tag. - - First the signatures under the `start_signature` are included; - if less than 2 are found, tags above the start signature are added to the result if any are present. - """ - signatures = [] - for element in ( - *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)), - start_signature, - *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2), - )[-(_MAX_SIGNATURE_AMOUNT):]: - signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) - - if signature: - signatures.append(signature) - - return signatures - - def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collection[str]]: """ Truncate passed signatures to not exceed `_MAX_SIGNAUTRES_LENGTH`. 
@@ -210,7 +116,7 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec if not sum(len(signature) for signature in signatures) > _MAX_SIGNATURES_LENGTH: return signatures - max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (_MAX_SIGNATURE_AMOUNT + 1 - len(signatures)) + max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (MAX_SIGNATURE_AMOUNT + 1 - len(signatures)) formatted_signatures = [] for signature in signatures: signature = signature.strip() @@ -317,17 +223,6 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] return formatted_markdown -def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]: - """Create callable that returns True when the passed in tag's class is in `class_names` or when it's is a table.""" - def match_tag(tag: Tag) -> bool: - for attr in class_names: - if attr in tag.get("class", ()): - return True - return tag.name == "table" - - return match_tag - - def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]: """ Return parsed markdown of the passed symbol using the passed in soup, truncated to 1000 characters. @@ -342,12 +237,12 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[s # Modules, doc pages and labels don't point to description list tags but to tags like divs, # no special parsing can be done so we only try to include what's under them. 
if symbol_data.group in {"module", "doc", "label"} or symbol_heading.name != "dt": - description = _get_general_description(symbol_heading) + description = get_general_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: - description = _get_dd_description(symbol_heading) + description = get_dd_description(symbol_heading) else: - signature = _get_signatures(symbol_heading) - description = _get_dd_description(symbol_heading) + signature = get_signatures(symbol_heading) + description = get_dd_description(symbol_heading) return _create_markdown(signature, description, symbol_data.url).replace('¶', '').strip() -- cgit v1.2.3 From 22520b9b37e161437a376a6067955e0c9b91cc76 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 04:01:34 +0100 Subject: Defer import to avoid circular imports --- bot/exts/info/doc/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py index af0bbff2d..dff7a0269 100644 --- a/bot/exts/info/doc/__init__.py +++ b/bot/exts/info/doc/__init__.py @@ -1,5 +1,4 @@ from bot.bot import Bot -from ._cog import DocCog MAX_SIGNATURE_AMOUNT = 3 PRIORITY_PACKAGES = ( @@ -9,4 +8,5 @@ PRIORITY_PACKAGES = ( def setup(bot: Bot) -> None: """Load the Doc cog.""" + from ._cog import DocCog bot.add_cog(DocCog(bot)) -- cgit v1.2.3 From 33b408d9e2cc805e2cfc6851225929c50725ea80 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 06:15:27 +0100 Subject: Rename CachedParser to BatchParser and move it to its own module --- bot/exts/info/doc/__init__.py | 3 + bot/exts/info/doc/_batch_parser.py | 173 +++++++++++++++++++++++++++++++++++++ bot/exts/info/doc/_cog.py | 170 +----------------------------------- 3 files changed, 180 insertions(+), 166 deletions(-) create mode 100644 bot/exts/info/doc/_batch_parser.py diff --git a/bot/exts/info/doc/__init__.py 
b/bot/exts/info/doc/__init__.py index dff7a0269..2bb43a950 100644 --- a/bot/exts/info/doc/__init__.py +++ b/bot/exts/info/doc/__init__.py @@ -1,10 +1,13 @@ from bot.bot import Bot +from ._redis_cache import DocRedisCache MAX_SIGNATURE_AMOUNT = 3 PRIORITY_PACKAGES = ( "python", ) +doc_cache = DocRedisCache(namespace="Docs") + def setup(bot: Bot) -> None: """Load the Doc cog.""" diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py new file mode 100644 index 000000000..edd6bb090 --- /dev/null +++ b/bot/exts/info/doc/_batch_parser.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import asyncio +import logging +import time +from collections import defaultdict +from contextlib import suppress +from functools import partial +from operator import attrgetter +from typing import Dict, List, NamedTuple, TYPE_CHECKING, Union + +from bs4 import BeautifulSoup + +import bot +from . import doc_cache +from ._parsing import get_symbol_markdown +if TYPE_CHECKING: + from ._cog import DocItem + +log = logging.getLogger(__name__) + + +class QueueItem(NamedTuple): + """Contains a symbol and the BeautifulSoup object needed to parse it.""" + + symbol: DocItem + soup: BeautifulSoup + + def __eq__(self, other: Union[QueueItem, DocItem]): + if isinstance(other, type(self.symbol)): + return self.symbol == other + return NamedTuple.__eq__(self, other) + + +class ParseResultFuture(asyncio.Future): + """ + Future with metadata for the parser class. + + `user_requested` is set by the parser when a Future is requested by an user and moved to the front, + allowing the futures to only be waited for when clearing if they were user requested. 
+ + `result_set_time` provides the time at which the future's result has been set, + or -inf if the result hasn't been set yet + """ + + def __init__(self): + super().__init__() + self.user_requested = False + self.result_set_time = float("inf") + + def set_result(self, result: str, /) -> None: + """Set `self.result_set_time` to current time when the result is set.""" + self.result_set_time = time.time() + super().set_result(result) + + +class BatchParser: + """ + Get the Markdown of all symbols on a page and send them to redis when a symbol is requested. + + DocItems are added through the `add_item` method which adds them to the `_page_symbols` dict. + `get_markdown` is used to fetch the Markdown; when this is used for the first time on a page, + all of the symbols are queued to be parsed to avoid multiple web requests to the same page. + """ + + def __init__(self): + self._queue: List[QueueItem] = [] + self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) + self._item_futures: Dict[DocItem, ParseResultFuture] = {} + self._parse_task = None + + self.cleanup_futures_task = bot.instance.loop.create_task(self._cleanup_futures()) + + async def get_markdown(self, doc_item: DocItem) -> str: + """ + Get the result Markdown of `doc_item`. + + If no symbols were fetched from `doc_item`s page before, + the HTML has to be fetched and then all items from the page are put into the parse queue. + + Not safe to run while `self.clear` is running. 
+ """ + if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: + async with bot.instance.http_session.get(doc_item.url) as response: + soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") + + self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) + self._item_futures.update((symbol, ParseResultFuture()) for symbol in symbols_to_queue) + del self._page_symbols[doc_item.url] + log.debug(f"Added symbols from {doc_item.url} to parse queue.") + + if self._parse_task is None: + self._parse_task = asyncio.create_task(self._parse_queue()) + + with suppress(ValueError): + # If the item is not in the list then the item is already parsed or is being parsed + self._move_to_front(doc_item) + self._item_futures[doc_item].user_requested = True + return await self._item_futures[doc_item] + + async def _parse_queue(self) -> None: + """ + Parse all item from the queue, setting their result markdown on the futures and sending them to redis. + + The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished. + """ + log.trace("Starting queue parsing.") + try: + while self._queue: + item, soup = self._queue.pop() + try: + if (future := self._item_futures[item]).done(): + # Some items are present in the inventories multiple times under different symbols, + # if we already parsed an equal item, we can just skip it. 
+ continue + + markdown = await bot.instance.loop.run_in_executor( + None, + partial(get_symbol_markdown, soup, item), + ) + if markdown is not None: + await doc_cache.set(item, markdown) + except Exception as e: + log.exception(f"Unexpected error when handling {item}") + future.set_exception(e) + else: + future.set_result(markdown) + await asyncio.sleep(0.1) + finally: + self._parse_task = None + log.trace("Finished parsing queue.") + + def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: + """Move `item` to the front of the parse queue.""" + # The parse queue stores soups along with the doc symbols in QueueItem objects, + # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. + item_index = self._queue.index(item) + queue_item = self._queue.pop(item_index) + + self._queue.append(queue_item) + + def add_item(self, doc_item: DocItem) -> None: + """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" + self._page_symbols[doc_item.url].append(doc_item) + + async def clear(self) -> None: + """ + Clear all internal symbol data. + + All currently requested items are waited to be parsed before clearing. + """ + for future in filter(attrgetter("user_requested"), self._item_futures.values()): + await future + if self._parse_task is not None: + self._parse_task.cancel() + self._queue.clear() + self._page_symbols.clear() + self._item_futures.clear() + + async def _cleanup_futures(self) -> None: + """ + Clear old futures from internal results. + + After a future is set, we only need to wait for old requests to its associated `DocItem` to finish + as all new requests will get the value from the redis cache in the cog first. + Keeping them around for longer than a second is unnecessary and keeps the parsed Markdown strings alive. 
+ """ + while True: + current_time = time.time() + for key, future in self._item_futures.copy().items(): + if current_time - future.result_set_time > 5: + del self._item_futures[key] + await asyncio.sleep(5) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index fd211d9f1..7a943f1a4 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -4,19 +4,13 @@ import asyncio import logging import re import sys -import time -from collections import defaultdict from contextlib import suppress -from functools import partial -from operator import attrgetter from types import SimpleNamespace -from typing import Dict, List, NamedTuple, Optional, Union +from typing import Dict, NamedTuple, Optional import discord -from bs4 import BeautifulSoup from discord.ext import commands -from bot import instance as bot_instance from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import Inventory, PackageName, ValidURL @@ -24,10 +18,9 @@ from bot.pagination import LinePaginator from bot.utils.lock import lock from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler -from . import PRIORITY_PACKAGES +from . 
import PRIORITY_PACKAGES, doc_cache +from ._batch_parser import BatchParser from ._inventory_parser import INVENTORY_DICT, fetch_inventory -from ._parsing import get_symbol_markdown -from ._redis_cache import DocRedisCache log = logging.getLogger(__name__) @@ -48,8 +41,6 @@ REFRESH_EVENT = asyncio.Event() REFRESH_EVENT.set() COMMAND_LOCK_SINGLETON = "inventory refresh" -doc_cache = DocRedisCache(namespace="Docs") - class DocItem(NamedTuple): """Holds inventory symbol information.""" @@ -66,159 +57,6 @@ class DocItem(NamedTuple): return self.base_url + self.relative_url_path -class QueueItem(NamedTuple): - """Contains a symbol and the BeautifulSoup object needed to parse it.""" - - symbol: DocItem - soup: BeautifulSoup - - def __eq__(self, other: Union[QueueItem, DocItem]): - if isinstance(other, DocItem): - return self.symbol == other - return NamedTuple.__eq__(self, other) - - -class ParseResultFuture(asyncio.Future): - """ - Future with metadata for the parser class. - - `user_requested` is set by the parser when a Future is requested by an user and moved to the front, - allowing the futures to only be waited for when clearing if they were user requested. - - `result_set_time` provides the time at which the future's result has been set, - or -inf if the result hasn't been set yet - """ - - def __init__(self): - super().__init__() - self.user_requested = False - self.result_set_time = float("inf") - - def set_result(self, result: str, /) -> None: - """Set `self.result_set_time` to current time when the result is set.""" - self.result_set_time = time.time() - super().set_result(result) - - -class CachedParser: - """ - Get the symbol Markdown from pages with smarter caching. - - DocItems are added through the `add_item` method which adds them to the `_page_symbols` dict. - `get_markdown` is used to fetch the Markdown; when this is used for the first time on a page, - all of the symbols are queued to be parsed to avoid multiple web requests to the same page. 
- """ - - def __init__(self): - self._queue: List[QueueItem] = [] - self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) - self._item_futures: Dict[DocItem, ParseResultFuture] = {} - self._parse_task = None - - self.cleanup_futures_task = bot_instance.loop.create_task(self._cleanup_futures()) - - async def get_markdown(self, doc_item: DocItem) -> str: - """ - Get the result Markdown of `doc_item`. - - If no symbols were fetched from `doc_item`s page before, - the HTML has to be fetched before parsing can be queued. - - Not safe to run while `self.clear` is running. - """ - if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: - async with bot_instance.http_session.get(doc_item.url) as response: - soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") - - self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) - self._item_futures.update((symbol, ParseResultFuture()) for symbol in symbols_to_queue) - del self._page_symbols[doc_item.url] - log.debug(f"Added symbols from {doc_item.url} to parse queue.") - - if self._parse_task is None: - self._parse_task = asyncio.create_task(self._parse_queue()) - - with suppress(ValueError): - # If the item is not in the list then the item is already parsed or is being parsed - self._move_to_front(doc_item) - self._item_futures[doc_item].user_requested = True - return await self._item_futures[doc_item] - - async def _parse_queue(self) -> None: - """ - Parse all item from the queue, setting associated events for symbols if present. - - The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished. - """ - log.trace("Starting queue parsing.") - try: - while self._queue: - item, soup = self._queue.pop() - try: - if (future := self._item_futures[item]).done(): - # Some items are present in the inventories multiple times under different symbols, - # if we already parsed an equal item, we can just skip it. 
- continue - - markdown = await bot_instance.loop.run_in_executor( - None, - partial(get_symbol_markdown, soup, item), - ) - if markdown is not None: - await doc_cache.set(item, markdown) - except Exception as e: - log.exception(f"Unexpected error when handling {item}") - future.set_exception(e) - else: - future.set_result(markdown) - await asyncio.sleep(0.1) - finally: - self._parse_task = None - log.trace("Finished parsing queue.") - - def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: - """Move `item` to the front of the parse queue.""" - # The parse queue stores soups along with the doc symbols in QueueItem objects, - # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. - item_index = self._queue.index(item) - queue_item = self._queue.pop(item_index) - - self._queue.append(queue_item) - - def add_item(self, doc_item: DocItem) -> None: - """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" - self._page_symbols[doc_item.url].append(doc_item) - - async def clear(self) -> None: - """ - Clear all internal symbol data. - - All currently requested items are waited to be parsed before clearing. - """ - for future in filter(attrgetter("user_requested"), self._item_futures.values()): - await future - if self._parse_task is not None: - self._parse_task.cancel() - self._queue.clear() - self._page_symbols.clear() - self._item_futures.clear() - - async def _cleanup_futures(self) -> None: - """ - Clear old futures from internal results. - - After a future is set, we only need to wait for old requests to its associated DocItem to finish - as all new requests will get the value from the redis cache in the cog first. - Keeping them around for longer than a second is unnecessary and keeps the parsed Markdown strings alive. 
- """ - while True: - current_time = time.time() - for key, future in self._item_futures.copy().items(): - if current_time - future.result_set_time > 5: - del self._item_futures[key] - await asyncio.sleep(5) - - class DocCog(commands.Cog): """A set of commands for querying & displaying documentation.""" @@ -226,7 +64,7 @@ class DocCog(commands.Cog): self.base_urls = {} self.bot = bot self.doc_symbols: Dict[str, DocItem] = {} - self.item_fetcher = CachedParser() + self.item_fetcher = BatchParser() self.renamed_symbols = set() self.inventory_scheduler = Scheduler(self.__class__.__name__) -- cgit v1.2.3 From a50239e6fe46e2da36f925967db21c95f940597a Mon Sep 17 00:00:00 2001 From: Harbys Date: Sun, 10 Jan 2021 19:45:25 +0100 Subject: indent fix --- config-default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config-default.yml b/config-default.yml index 042d80408..03d568285 100644 --- a/config-default.yml +++ b/config-default.yml @@ -255,7 +255,7 @@ guild: team_leaders: 737250302834638889 # Streaming - video: 764245844798079016 + video: 764245844798079016 moderation_roles: - *OWNERS_ROLE -- cgit v1.2.3 From 074ce91205539ca06c1c048c62b7e649c4ae78b5 Mon Sep 17 00:00:00 2001 From: Harbys Date: Sun, 10 Jan 2021 20:08:01 +0100 Subject: add 30 minute default for stream command --- bot/exts/moderation/stream.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index d8c2a8628..d8ffe32ff 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,3 +1,5 @@ +import datetime + import discord from discord.ext import commands @@ -32,7 +34,7 @@ class Stream(commands.Cog): self, ctx: commands.Context, user: discord.Member, - duration: Expiry, + duration: Expiry = datetime.datetime.utcnow() + datetime.timedelta(minutes=30), *_ ) -> None: """ -- cgit v1.2.3 From a0e2179da398a9164582e7debb7e139754434385 Mon Sep 17 00:00:00 2001 From: Harbys Date: Sun, 10 Jan 
2021 20:31:05 +0100 Subject: move default duration for stream command to config --- bot/constants.py | 6 ++++++ bot/exts/moderation/stream.py | 5 +++-- config-default.yml | 4 ++++ 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index dca83e7ab..d912a5a9a 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -635,6 +635,12 @@ class Event(Enum): voice_state_update = "voice_state_update" +class VideoPermission(metaclass=YAMLGetter): + section = "video_permission" + + default_permission_duration: int + + # Debug mode DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index d8ffe32ff..b590956a3 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -4,7 +4,7 @@ import discord from discord.ext import commands from bot.bot import Bot -from bot.constants import Emojis, Roles, STAFF_ROLES +from bot.constants import Emojis, Roles, STAFF_ROLES, VideoPermission from bot.converters import Expiry from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration @@ -34,7 +34,8 @@ class Stream(commands.Cog): self, ctx: commands.Context, user: discord.Member, - duration: Expiry = datetime.datetime.utcnow() + datetime.timedelta(minutes=30), + duration: Expiry = + datetime.datetime.utcnow() + datetime.timedelta(minutes=VideoPermission.default_permission_duration), *_ ) -> None: """ diff --git a/config-default.yml b/config-default.yml index 03d568285..ec982b0d3 100644 --- a/config-default.yml +++ b/config-default.yml @@ -533,3 +533,7 @@ voice_gate: config: required_keys: ['bot.token'] + + +video_permission: + default_permission_duration: 30 # Default duration for stream command in minutes -- cgit v1.2.3 From 9a4ad5f73cd2c42087643cb36b9e6076c24695fb Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 22:00:59 +0100 Subject: Change the 
func name to wrapped for clarity --- bot/utils/function.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/utils/function.py b/bot/utils/function.py index 8b8c7ba5c..037516ac4 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -76,23 +76,23 @@ def get_bound_args(func: t.Callable, args: t.Tuple, kwargs: t.Dict[str, t.Any]) return bound_args.arguments -def update_wrapper_globals(wrapper: types.FunctionType, func: types.FunctionType) -> types.FunctionType: +def update_wrapper_globals(wrapper: types.FunctionType, wrapped: types.FunctionType) -> types.FunctionType: """ - Update globals of `wrapper` with the globals from `func`. + Update globals of `wrapper` with the globals from `wrapped`. For forwardrefs in command annotations discordpy uses the __global__ attribute of the function to resolve their values, with decorators that replace the function this breaks because they have their own globals. This function creates a new function functionally identical to `wrapper`, which has the globals replaced with - a merge of `func`s globals and the `wrapper`s globals. + a merge of `wrapped`s globals and the `wrapper`s globals. - In case a global name from `func` conflicts with a name from `wrapper`'s globals, `wrapper` will win + In case a global name from `wrapped` conflicts with a name from `wrapper`'s globals, `wrapper` will win to keep it functional, but this may cause problems if the name is used as an annotation and - discord.py uses it as a converter on a parameter from `func`. + discord.py uses it as a converter on a parameter from `wrapped`. 
""" new_globals = wrapper.__globals__.copy() - new_globals.update((k, v) for k, v in func.__globals__.items() if k not in wrapper.__code__.co_names) + new_globals.update((k, v) for k, v in wrapped.__globals__.items() if k not in wrapper.__code__.co_names) return types.FunctionType( code=wrapper.__code__, globals=new_globals, -- cgit v1.2.3 From 4788a9364ac84cf0ee210c8b026ea7f2d5dd31ee Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 22:07:58 +0100 Subject: Create decorator for update_wrapper_globals mimicking functools.wraps --- bot/decorators.py | 14 ++++++++------ bot/utils/function.py | 15 +++++++++++++++ bot/utils/lock.py | 10 +++++++--- 3 files changed, 30 insertions(+), 9 deletions(-) diff --git a/bot/decorators.py b/bot/decorators.py index a37996e80..02735d0dc 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -1,8 +1,8 @@ import asyncio import logging +import types import typing as t from contextlib import suppress -from functools import update_wrapper from discord import Member, NotFound from discord.ext import commands @@ -11,6 +11,7 @@ from discord.ext.commands import Cog, Context from bot.constants import Channels, RedirectOutput from bot.utils import function from bot.utils.checks import in_whitelist_check +from bot.utils.function import command_wraps log = logging.getLogger(__name__) @@ -70,7 +71,8 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N This decorator must go before (below) the `command` decorator. 
""" - def wrap(func: t.Callable) -> t.Callable: + def wrap(func: types.FunctionType) -> types.FunctionType: + @command_wraps(func) async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None: if ctx.channel.id == destination_channel: log.trace(f"Command {ctx.command.name} was invoked in destination_channel, not redirecting") @@ -104,8 +106,7 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N with suppress(NotFound): await ctx.message.delete() log.trace("Redirect output: Deleted invocation message") - - return update_wrapper(function.update_wrapper_globals(inner, func), func) + return inner return wrap @@ -121,7 +122,8 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: This decorator must go before (below) the `command` decorator. """ - def decorator(func: t.Callable) -> t.Callable: + def decorator(func: types.FunctionType) -> types.FunctionType: + @command_wraps(func) async def wrapper(*args, **kwargs) -> None: log.trace(f"{func.__name__}: respect role hierarchy decorator called") @@ -149,5 +151,5 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: else: log.trace(f"{func.__name__}: {target.top_role=} < {actor.top_role=}; calling func") await func(*args, **kwargs) - return update_wrapper(function.update_wrapper_globals(wrapper, func), func) + return wrapper return decorator diff --git a/bot/utils/function.py b/bot/utils/function.py index 037516ac4..5fd70e1e8 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -1,5 +1,6 @@ """Utilities for interaction with functions.""" +import functools import inspect import types import typing as t @@ -100,3 +101,17 @@ def update_wrapper_globals(wrapper: types.FunctionType, wrapped: types.FunctionT argdefs=wrapper.__defaults__, closure=wrapper.__closure__, ) + + +def command_wraps( + wrapped: types.FunctionType, + assigned: t.Sequence[str] = functools.WRAPPER_ASSIGNMENTS, + updated: t.Sequence[str] = functools.WRAPPER_UPDATES, +) 
-> t.Callable[[types.FunctionType], types.FunctionType]: + """Update `wrapped` to look like the decorated function and update globals for discordpy forwardref evaluation.""" + def decorator(wrapper: types.FunctionType) -> types.FunctionType: + return functools.update_wrapper( + update_wrapper_globals(wrapper, wrapped), wrapped, assigned, updated + ) + + return decorator diff --git a/bot/utils/lock.py b/bot/utils/lock.py index 02188c827..978e3ae94 100644 --- a/bot/utils/lock.py +++ b/bot/utils/lock.py @@ -1,12 +1,14 @@ import inspect import logging +import types from collections import defaultdict -from functools import partial, update_wrapper +from functools import partial from typing import Any, Awaitable, Callable, Hashable, Union from weakref import WeakValueDictionary from bot.errors import LockedResourceError from bot.utils import function +from bot.utils.function import command_wraps log = logging.getLogger(__name__) __lock_dicts = defaultdict(WeakValueDictionary) @@ -58,9 +60,10 @@ def lock(namespace: Hashable, resource_id: ResourceId, *, raise_error: bool = Fa If decorating a command, this decorator must go before (below) the `command` decorator. 
""" - def decorator(func: Callable) -> Callable: + def decorator(func: types.FunctionType) -> types.FunctionType: name = func.__name__ + @command_wraps(func) async def wrapper(*args, **kwargs) -> Any: log.trace(f"{name}: mutually exclusive decorator called") @@ -91,7 +94,8 @@ def lock(namespace: Hashable, resource_id: ResourceId, *, raise_error: bool = Fa log.info(f"{name}: aborted because resource {namespace!r}:{id_!r} is locked") if raise_error: raise LockedResourceError(str(namespace), id_) - return update_wrapper(function.update_wrapper_globals(wrapper, func), func) + return wrapper + return decorator -- cgit v1.2.3 From d50ae50681f552c9a0d3e2c797b0916a09da54da Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 10 Jan 2021 22:10:12 +0100 Subject: Resolve wrapped command callbacks in the source command Without this the command will fetch the source of the wrapper --- bot/exts/info/source.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py index 7b41352d4..ae68ef7e8 100644 --- a/bot/exts/info/source.py +++ b/bot/exts/info/source.py @@ -68,7 +68,10 @@ class BotSource(commands.Cog): Raise BadArgument if `source_item` is a dynamically-created object (e.g. via internal eval). 
""" if isinstance(source_item, commands.Command): - src = source_item.callback.__code__ + source_item = source_item.callback + while hasattr(source_item, "__wrapped__"): + source_item = source_item.__wrapped__ + src = source_item.__code__ filename = src.co_filename elif isinstance(source_item, str): tags_cog = self.bot.get_cog("Tags") -- cgit v1.2.3 From 760ca7e9a0996865ee4d9e127baef8f0246a9e25 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 00:54:37 +0100 Subject: Send a message to devlog instead of logging a warning --- bot/exts/info/doc/_batch_parser.py | 30 ++++++++++++++++++++++++++++++ bot/exts/info/doc/_parsing.py | 1 - 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index edd6bb090..ebae6efb8 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -9,9 +9,11 @@ from functools import partial from operator import attrgetter from typing import Dict, List, NamedTuple, TYPE_CHECKING, Union +import discord from bs4 import BeautifulSoup import bot +from bot.constants import Channels from . 
import doc_cache
 from ._parsing import get_symbol_markdown
 
 if TYPE_CHECKING:
@@ -20,6 +22,30 @@ if TYPE_CHECKING:
 log = logging.getLogger(__name__)
 
 
+class StaleInventoryNotifier:
+    """Handle sending notifications about stale inventories through `DocItem`s to dev log."""
+
+    def __init__(self):
+        self._init_task = bot.instance.loop.create_task(self._init_channel())
+        self._warned_urls = set()
+
+    async def _init_channel(self) -> None:
+        """Wait for guild and get channel."""
+        await bot.instance.wait_until_guild_available()
+        self._dev_log = bot.instance.get_channel(Channels.dev_log)
+
+    async def send_warning(self, item: DocItem) -> None:
+        """Send a warning to dev log if one wasn't already sent for `item`'s url."""
+        if item.url not in self._warned_urls:
+            self._warned_urls.add(item.url)
+            await self._init_task
+            embed = discord.Embed(
+                description=f"Doc item `{item.symbol_id=}` present in loaded documentation inventories "
+                            f"not found on [site]({item.url}), inventories may need to be refreshed."
+            )
+            await self._dev_log.send(embed=embed)
+
+
 class QueueItem(NamedTuple):
     """Contains a symbol and the BeautifulSoup object needed to parse it."""
 
@@ -71,6 +97,8 @@ class BatchParser:
 
         self.cleanup_futures_task = bot.instance.loop.create_task(self._cleanup_futures())
 
+        self.stale_inventory_notifier = StaleInventoryNotifier()
+
     async def get_markdown(self, doc_item: DocItem) -> str:
        """
        Get the result Markdown of `doc_item`.
@@ -120,6 +148,8 @@ class BatchParser: ) if markdown is not None: await doc_cache.set(item, markdown) + else: + asyncio.create_task(self.stale_inventory_notifier.send_warning(item)) except Exception as e: log.exception(f"Unexpected error when handling {item}") future.set_exception(e) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index d68f7c8d7..257161dd5 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -231,7 +231,6 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[s """ symbol_heading = soup.find(id=symbol_data.symbol_id) if symbol_heading is None: - log.warning("Symbol present in loaded inventories not found on site, consider refreshing inventories.") return None signature = None # Modules, doc pages and labels don't point to description list tags but to tags like divs, -- cgit v1.2.3 From c2447e0f2a3f28f79ec73d82b3ba4923b377f3e9 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 01:07:07 +0100 Subject: Update outdated docstring --- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 257161dd5..f07b530c1 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -225,7 +225,7 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]: """ - Return parsed markdown of the passed symbol using the passed in soup, truncated to 1000 characters. + Return parsed markdown of the passed symbol using the passed in soup, truncated to fit within a discord message. The method of parsing and what information gets included depends on the symbol's group. 
""" -- cgit v1.2.3 From 3b735398ca88b022e2fd815d715f3965c87f32ce Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 02:01:00 +0100 Subject: Handle renaming conflicting symbols in ensure_unique_symbol_name Previously update_single looped this function until there were no duplicates and when creating new symbols the function had to check if the symbol to create a new name from started with a group/package to avoid redundancy. The new approach ensures a new symbol is always unique when returning by handling the containment check inside and outputting a symbol name in the format of package.group.symbol which should always be unique --- bot/exts/info/doc/_cog.py | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 7a943f1a4..5b38af95b 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -95,16 +95,14 @@ class DocCog(commands.Cog): # e.g. get 'class' from 'py:class' group_name = group.split(":")[1] - while (original_symbol := self.doc_symbols.get(symbol)) is not None: + if (original_symbol := self.doc_symbols.get(symbol)) is not None: replaced_symbol_name = self.ensure_unique_symbol_name( api_package_name, group_name, original_symbol, symbol, ) - if replaced_symbol_name is None: - break - else: + if replaced_symbol_name is not None: symbol = replaced_symbol_name relative_url_path, _, symbol_id = relative_doc_url.partition("#") @@ -169,8 +167,12 @@ class DocCog(commands.Cog): """ # Certain groups are added as prefixes to disambiguate the symbols. if group_name in FORCE_PREFIX_GROUPS: - self.renamed_symbols.add(symbol_name) - return f"{group_name}.{symbol_name}" + new_symbol = f"{group_name}.{symbol_name}" + if new_symbol in self.doc_symbols: + # If there's still a conflict, prefix with package name. 
+ new_symbol = f"{package_name}.{new_symbol}" + self.renamed_symbols.add(new_symbol) + return new_symbol # The existing symbol with which the current symbol conflicts should have a group prefix. # It currently doesn't have the group prefix because it's only added once there's a conflict. @@ -195,15 +197,12 @@ class DocCog(commands.Cog): # If we can't specially handle the symbol through its group or package, # fall back to prepending its package name to the front. else: - if symbol_name.startswith(package_name): - # If the symbol already starts with the package name, insert the group name after it. - split_symbol_name = symbol_name.split(".", maxsplit=1) - split_symbol_name.insert(1, group_name) - overridden_symbol = ".".join(split_symbol_name) - else: - overridden_symbol = f"{package_name}.{symbol_name}" - self.renamed_symbols.add(overridden_symbol) - return overridden_symbol + new_symbol = f"{package_name}.{symbol_name}" + if new_symbol in self.doc_symbols: + # If there's still a conflict, add the symbol's group in the middle. 
+ new_symbol = f"{package_name}.{group_name}.{symbol_name}" + self.renamed_symbols.add(new_symbol) + return new_symbol async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" -- cgit v1.2.3 From c92a9985a5a43dc26e7590d7581d47fbbc5e27a8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 02:02:56 +0100 Subject: Use a dictionary of lists instead of set for renamed symbols A dictionary allows us to grab the original symbol name and then get all the renamed symbols from it, with the improvements to `ensure_unique_symbol_name` we can also use lists instead of sets as each symbol we add should be unique --- bot/exts/info/doc/_cog.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 5b38af95b..deef37f8f 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -4,6 +4,7 @@ import asyncio import logging import re import sys +from collections import defaultdict from contextlib import suppress from types import SimpleNamespace from typing import Dict, NamedTuple, Optional @@ -65,7 +66,7 @@ class DocCog(commands.Cog): self.bot = bot self.doc_symbols: Dict[str, DocItem] = {} self.item_fetcher = BatchParser() - self.renamed_symbols = set() + self.renamed_symbols = defaultdict(list) self.inventory_scheduler = Scheduler(self.__class__.__name__) self.scheduled_inventories = set() @@ -171,7 +172,7 @@ class DocCog(commands.Cog): if new_symbol in self.doc_symbols: # If there's still a conflict, prefix with package name. new_symbol = f"{package_name}.{new_symbol}" - self.renamed_symbols.add(new_symbol) + self.renamed_symbols[symbol_name].append(new_symbol) return new_symbol # The existing symbol with which the current symbol conflicts should have a group prefix. 
@@ -183,7 +184,7 @@ class DocCog(commands.Cog): overridden_symbol = f"{original_item.package}.{overridden_symbol}" self.doc_symbols[overridden_symbol] = original_item - self.renamed_symbols.add(overridden_symbol) + self.renamed_symbols[symbol_name].append(overridden_symbol) elif package_name in PRIORITY_PACKAGES: overridden_symbol = f"{original_item.package}.{symbol_name}" @@ -192,7 +193,7 @@ class DocCog(commands.Cog): overridden_symbol = f"{original_item.package}.{original_item.group}.{symbol_name}" self.doc_symbols[overridden_symbol] = original_item - self.renamed_symbols.add(overridden_symbol) + self.renamed_symbols[symbol_name].append(overridden_symbol) # If we can't specially handle the symbol through its group or package, # fall back to prepending its package name to the front. @@ -201,7 +202,7 @@ class DocCog(commands.Cog): if new_symbol in self.doc_symbols: # If there's still a conflict, add the symbol's group in the middle. new_symbol = f"{package_name}.{group_name}.{symbol_name}" - self.renamed_symbols.add(new_symbol) + self.renamed_symbols[symbol_name].append(new_symbol) return new_symbol async def refresh_inventory(self) -> None: @@ -265,9 +266,7 @@ class DocCog(commands.Cog): description=markdown ) # Show all symbols with the same name that were renamed in the footer. 
- embed.set_footer( - text=", ".join(renamed for renamed in self.renamed_symbols - {symbol} if renamed.endswith(f".{symbol}")) - ) + embed.set_footer(text=", ".join(self.renamed_symbols[symbol])) return embed @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) -- cgit v1.2.3 From 8d927ff13e0fd93e80102b43c2568f1e74a29a7c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 02:10:35 +0100 Subject: Ensure footer fits into message The footer also now says Moved: at the start to clarify the meaning of the symbols to the user --- bot/exts/info/doc/_cog.py | 7 ++++++- bot/exts/info/doc/_parsing.py | 4 ++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index deef37f8f..b8c1a10d4 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -4,6 +4,7 @@ import asyncio import logging import re import sys +import textwrap from collections import defaultdict from contextlib import suppress from types import SimpleNamespace @@ -266,7 +267,11 @@ class DocCog(commands.Cog): description=markdown ) # Show all symbols with the same name that were renamed in the footer. 
- embed.set_footer(text=", ".join(self.renamed_symbols[symbol])) + if renamed_symbols := self.renamed_symbols[symbol]: + footer_text = f"Moved: {textwrap.shorten(', '.join(renamed_symbols), 100, placeholder=' ...')}" + else: + footer_text = "" + embed.set_footer(text=footer_text) return embed @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index f07b530c1..45a81a4cb 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -33,8 +33,8 @@ _NO_SIGNATURE_GROUPS = { _EMBED_CODE_BLOCK_LINE_LENGTH = 61 # _MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight _MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * MAX_SIGNATURE_AMOUNT -# Maximum discord message length - signatures on top -_MAX_DESCRIPTION_LENGTH = 2000 - _MAX_SIGNATURES_LENGTH +# Maximum discord message length - signatures on top - space for footer +_MAX_DESCRIPTION_LENGTH = 1900 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." 
+ string.whitespace BracketPair = namedtuple("BracketPair", ["opening_bracket", "closing_bracket"]) -- cgit v1.2.3 From 5c97efab1bf3d15911a343687b50af92b57bc036 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 02:13:58 +0100 Subject: Don't convert package names into lowercase The converter used to set them already ensures this for us, making the call redundant --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index b8c1a10d4..0e7eff9d9 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -250,7 +250,7 @@ class DocCog(commands.Cog): if symbol_info is None: log.debug("Symbol does not exist.") return None - self.bot.stats.incr(f"doc_fetches.{symbol_info.package.lower()}") + self.bot.stats.incr(f"doc_fetches.{symbol_info.package}") markdown = await doc_cache.get(symbol_info) if markdown is None: -- cgit v1.2.3 From a7ba149904ac0643cc7e267d219fe86c159816e0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 02:37:53 +0100 Subject: Notify the user that inventories were refreshed on cache clears --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 0e7eff9d9..822c984d7 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -410,7 +410,7 @@ class DocCog(commands.Cog): """Clear the persistent redis cache for `package`.""" if await doc_cache.delete(package_name): await self.refresh_inventory() - await ctx.send(f"Successfully cleared the cache for `{package_name}`.") + await ctx.send(f"Successfully cleared the cache for `{package_name}` and refreshed the inventories.") else: await ctx.send("No keys matching the package found.") -- cgit v1.2.3 From f5235b16343816b02ceef56d1e753cb0167c6b03 Mon Sep 17 00:00:00 2001 From: 
Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 02:42:19 +0100 Subject: Check for containment instead of always getting the value from the dict Getting the value from a defaultdict will always create the key for it, creating unnecessary entries every time a symbol is fetched from the bot --- bot/exts/info/doc/_cog.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 822c984d7..b35469787 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -267,8 +267,9 @@ class DocCog(commands.Cog): description=markdown ) # Show all symbols with the same name that were renamed in the footer. - if renamed_symbols := self.renamed_symbols[symbol]: - footer_text = f"Moved: {textwrap.shorten(', '.join(renamed_symbols), 100, placeholder=' ...')}" + if symbol in self.renamed_symbols: + renamed_symbols = ', '.join(self.renamed_symbols[symbol]) + footer_text = f"Moved: {textwrap.shorten(renamed_symbols, 100, placeholder=' ...')}" else: footer_text = "" embed.set_footer(text=footer_text) -- cgit v1.2.3 From 780dbc7683c7ce9cece6f0707840f56005466dfe Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 02:52:34 +0100 Subject: Remove old reference to CachedParser and unused const --- bot/exts/info/doc/_cog.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index b35469787..bc230b74b 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -2,7 +2,6 @@ from __future__ import annotations import asyncio import logging -import re import sys import textwrap from collections import defaultdict @@ -34,7 +33,6 @@ FORCE_PREFIX_GROUPS = ( "pdbcommand", "term", ) -WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay # Delay to wait before trying to reach a rescheduled 
inventory again, in minutes FETCH_RESCHEDULE_DELAY = SimpleNamespace(first=2, repeated=5) @@ -238,8 +236,7 @@ class DocCog(commands.Cog): If the symbol is known, an Embed with documentation about it is returned. - First check the DocRedisCache before querying the cog's `CachedParser`, - if not present also create a redis entry for the symbol. + First check the DocRedisCache before querying the cog's `BatchParser`. """ log.trace(f"Building embed for symbol `{symbol}`") if not REFRESH_EVENT.is_set(): -- cgit v1.2.3 From a2c1e67ac764b363d48d685ace707a650279e009 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 04:32:10 +0100 Subject: Make REFRESH_EVENT an instance variable --- bot/exts/info/doc/_cog.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index bc230b74b..7bb819987 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -37,8 +37,6 @@ NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay # Delay to wait before trying to reach a rescheduled inventory again, in minutes FETCH_RESCHEDULE_DELAY = SimpleNamespace(first=2, repeated=5) -REFRESH_EVENT = asyncio.Event() -REFRESH_EVENT.set() COMMAND_LOCK_SINGLETON = "inventory refresh" @@ -70,6 +68,8 @@ class DocCog(commands.Cog): self.inventory_scheduler = Scheduler(self.__class__.__name__) self.scheduled_inventories = set() + self.refresh_event = asyncio.Event() + self.refresh_event.set() self.bot.loop.create_task(self.init_refresh_inventory()) @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) @@ -206,7 +206,7 @@ class DocCog(commands.Cog): async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" - REFRESH_EVENT.clear() + self.refresh_event.clear() log.debug("Refreshing documentation inventory...") self.inventory_scheduler.cancel_all() @@ -228,7 +228,7 @@ class DocCog(commands.Cog): ] await asyncio.gather(*coros) 
log.debug("Finished inventory refresh.") - REFRESH_EVENT.set() + self.refresh_event.set() async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: """ @@ -239,9 +239,9 @@ First check the DocRedisCache before querying the cog's `BatchParser`. """ log.trace(f"Building embed for symbol `{symbol}`") - if not REFRESH_EVENT.is_set(): + if not self.refresh_event.is_set(): log.debug("Waiting for inventories to be refreshed before processing item.") - await REFRESH_EVENT.wait() + await self.refresh_event.wait() symbol_info = self.doc_symbols.get(symbol) if symbol_info is None: -- cgit v1.2.3 From 551c01e2537b036c17253d5cbfc4cfee6150cc4a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 04:59:25 +0100 Subject: Return whitespace to its previous state --- bot/utils/lock.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/utils/lock.py b/bot/utils/lock.py index 978e3ae94..997c653a1 100644 --- a/bot/utils/lock.py +++ b/bot/utils/lock.py @@ -94,8 +94,8 @@ def lock(namespace: Hashable, resource_id: ResourceId, *, raise_error: bool = Fa log.info(f"{name}: aborted because resource {namespace!r}:{id_!r} is locked") if raise_error: raise LockedResourceError(str(namespace), id_) - return wrapper + return wrapper return decorator -- cgit v1.2.3 From bf2d3d58dda76e7407b2d10f1dd9c89ce8f17d8f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 11 Jan 2021 10:35:56 +0100 Subject: Fix docstring The decorator works in reverse to what the docstring explained --- bot/utils/function.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/utils/function.py b/bot/utils/function.py index 5fd70e1e8..ab7f45761 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -108,7 +108,7 @@ def command_wraps( assigned: t.Sequence[str] = functools.WRAPPER_ASSIGNMENTS, updated: t.Sequence[str] =
functools.WRAPPER_UPDATES, ) -> t.Callable[[types.FunctionType], types.FunctionType]: - """Update `wrapped` to look like the decorated function and update globals for discordpy forwardref evaluation.""" + """Update the decorated function to look like `wrapped` and update globals for discordpy forwardref evaluation.""" def decorator(wrapper: types.FunctionType) -> types.FunctionType: return functools.update_wrapper( update_wrapper_globals(wrapper, wrapped), wrapped, assigned, updated -- cgit v1.2.3 From d32e8f1029be8deb76e8c0d9bb457c9768ca878e Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Wed, 13 Jan 2021 19:08:32 +0200 Subject: Better regex, moved pattern handlers to __init__, and constant header --- bot/exts/info/code_snippets.py | 52 +++++++++++++++++++++++------------------- 1 file changed, 28 insertions(+), 24 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 669a21c7d..1899b139b 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -12,24 +12,27 @@ from bot.utils.messages import wait_for_deletion log = logging.getLogger(__name__) GITHUB_RE = re.compile( - r'https://github\.com/(?P.+?)/blob/(?P.+/.+)' - r'#L(?P\d+)([-~]L(?P\d+))?\b' + r'https://github\.com/(?P\S+?)/blob/(?P\S+/[^\s#]+)' + r'(#L(?P\d+)([-~:]L(?P\d+))?)?($|\s)' ) GITHUB_GIST_RE = re.compile( r'https://gist\.github\.com/([^/]+)/(?P[^\W_]+)/*' - r'(?P[^\W_]*)/*#file-(?P.+?)' - r'-L(?P\d+)([-~]L(?P\d+))?\b' + r'(?P[^\W_]*)/*#file-(?P\S+?)' + r'(-L(?P\d+)([-~:]L(?P\d+))?)?($|\s)' ) +GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} + GITLAB_RE = re.compile( - r'https://gitlab\.com/(?P.+?)/\-/blob/(?P.+/.+)' - r'#L(?P\d+)([-](?P\d+))?\b' + r'https://gitlab\.com/(?P\S+?)/\-/blob/(?P\S+/[^\s#]+)' + r'(#L(?P\d+)([-](?P\d+))?)?($|\s)' ) BITBUCKET_RE = re.compile( - r'https://bitbucket\.org/(?P.+?)/src/(?P.+?)/' - r'(?P.+?)#lines-(?P\d+)(:(?P\d+))?\b' + r'https://bitbucket\.org/(?P\S+?)/src/' + r'(?P\S+?)/(?P[^\s#]+)' + 
r'(#lines-(?P\d+)(:(?P\d+))?)?($|\s)' ) @@ -71,18 +74,20 @@ class CodeSnippets(Cog): end_line: str ) -> str: """Fetches a snippet from a GitHub repo.""" - headers = {'Accept': 'application/vnd.github.v3.raw'} - # Search the GitHub API for the specified branch - branches = await self._fetch_response(f'https://api.github.com/repos/{repo}/branches', 'json', headers=headers) - tags = await self._fetch_response(f'https://api.github.com/repos/{repo}/tags', 'json', headers=headers) + branches = await self._fetch_response( + f'https://api.github.com/repos/{repo}/branches', + 'json', + headers=GITHUB_HEADERS + ) + tags = await self._fetch_response(f'https://api.github.com/repos/{repo}/tags', 'json', headers=GITHUB_HEADERS) refs = branches + tags ref, file_path = self._find_ref(path, refs) file_contents = await self._fetch_response( f'https://api.github.com/repos/{repo}/contents/{file_path}?ref={ref}', 'text', - headers=headers, + headers=GITHUB_HEADERS, ) return self._snippet_to_codeblock(file_contents, file_path, start_line, end_line) @@ -95,12 +100,10 @@ class CodeSnippets(Cog): end_line: str ) -> str: """Fetches a snippet from a GitHub gist.""" - headers = {'Accept': 'application/vnd.github.v3.raw'} - gist_json = await self._fetch_response( f'https://api.github.com/gists/{gist_id}{f"/{revision}" if len(revision) > 0 else ""}', 'json', - headers=headers, + headers=GITHUB_HEADERS, ) # Check each file in the gist for the specified file @@ -207,19 +210,20 @@ class CodeSnippets(Cog): """Initializes the cog's bot.""" self.bot = bot + self.pattern_handlers = [ + (GITHUB_RE, self._fetch_github_snippet), + (GITHUB_GIST_RE, self._fetch_github_gist_snippet), + (GITLAB_RE, self._fetch_gitlab_snippet), + (BITBUCKET_RE, self._fetch_bitbucket_snippet) + ] + @Cog.listener() async def on_message(self, message: Message) -> None: """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" if not message.author.bot: message_to_send = '' - 
pattern_handlers = [ - (GITHUB_RE, self._fetch_github_snippet), - (GITHUB_GIST_RE, self._fetch_github_gist_snippet), - (GITLAB_RE, self._fetch_gitlab_snippet), - (BITBUCKET_RE, self._fetch_bitbucket_snippet) - ] - - for pattern, handler in pattern_handlers: + + for pattern, handler in self.pattern_handlers: for match in pattern.finditer(message.content): message_to_send += await handler(**match.groupdict()) -- cgit v1.2.3 From 1856ed852515c17c2095c10b93d4d418787ec178 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Wed, 13 Jan 2021 19:10:03 +0200 Subject: Better regex now works for --- bot/exts/info/code_snippets.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 1899b139b..1d1bc2850 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -12,27 +12,27 @@ from bot.utils.messages import wait_for_deletion log = logging.getLogger(__name__) GITHUB_RE = re.compile( - r'https://github\.com/(?P\S+?)/blob/(?P\S+/[^\s#]+)' - r'(#L(?P\d+)([-~:]L(?P\d+))?)?($|\s)' + r'https://github\.com/(?P\S+?)/blob/(?P\S+/[^\s#,>]+)' + r'(#L(?P\d+)([-~:]L(?P\d+))?)?($|\s|,|>)' ) GITHUB_GIST_RE = re.compile( r'https://gist\.github\.com/([^/]+)/(?P[^\W_]+)/*' r'(?P[^\W_]*)/*#file-(?P\S+?)' - r'(-L(?P\d+)([-~:]L(?P\d+))?)?($|\s)' + r'(-L(?P\d+)([-~:]L(?P\d+))?)?($|\s|,|>)' ) GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} GITLAB_RE = re.compile( - r'https://gitlab\.com/(?P\S+?)/\-/blob/(?P\S+/[^\s#]+)' - r'(#L(?P\d+)([-](?P\d+))?)?($|\s)' + r'https://gitlab\.com/(?P\S+?)/\-/blob/(?P\S+/[^\s#,>]+)' + r'(#L(?P\d+)([-](?P\d+))?)?($|\s|,|>)' ) BITBUCKET_RE = re.compile( r'https://bitbucket\.org/(?P\S+?)/src/' - r'(?P\S+?)/(?P[^\s#]+)' - r'(#lines-(?P\d+)(:(?P\d+))?)?($|\s)' + r'(?P\S+?)/(?P[^\s#,>]+)' + r'(#lines-(?P\d+)(:(?P\d+))?)?($|\s|,|>)' ) -- cgit v1.2.3 From 08b793024f271de009aab2391cd85576af5313cf Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Wed, 13 
Jan 2021 19:19:49 +0200 Subject: Better error reporting in _fetch_response(?) --- bot/exts/info/code_snippets.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 1d1bc2850..3469b88f4 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -3,6 +3,7 @@ import re import textwrap from urllib.parse import quote_plus +from aiohttp import ClientResponseError from discord import Message from discord.ext.commands import Cog @@ -46,13 +47,13 @@ class CodeSnippets(Cog): async def _fetch_response(self, url: str, response_format: str, **kwargs) -> str: """Makes http requests using aiohttp.""" try: - async with self.bot.http_session.get(url, **kwargs) as response: + async with self.bot.http_session.get(url, raise_for_status=True, **kwargs) as response: if response_format == 'text': return await response.text() elif response_format == 'json': return await response.json() - except Exception: - log.exception(f'Failed to fetch code snippet from {url}.') + except ClientResponseError as error: + log.error(f'Failed to fetch code snippet from {url}. HTTP Status: {error.status}. 
Message: {str(error)}.') def _find_ref(self, path: str, refs: tuple) -> tuple: """Loops through all branches and tags to find the required ref.""" -- cgit v1.2.3 From a3145654ab5c90d16f9b4ff53f3df40d7e35f683 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 12:56:57 +0100 Subject: Turn update_single into a normal function The method no longer runs anything asynchronous --- bot/exts/info/doc/_cog.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 7bb819987..f008f2c28 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -78,7 +78,7 @@ class DocCog(commands.Cog): await self.bot.wait_until_guild_available() await self.refresh_inventory() - async def update_single(self, api_package_name: str, base_url: str, package: INVENTORY_DICT) -> None: + def update_single(self, api_package_name: str, base_url: str, package: INVENTORY_DICT) -> None: """ Rebuild the inventory for a single package. 
@@ -148,7 +148,7 @@ class DocCog(commands.Cog): return self.scheduled_inventories.discard(api_package_name) - await self.update_single(api_package_name, base_url, package) + self.update_single(api_package_name, base_url, package) def ensure_unique_symbol_name( self, @@ -357,7 +357,7 @@ class DocCog(commands.Cog): + "\n".join(f"{key}: {value}" for key, value in body.items()) ) - await self.update_single(package_name, base_url, inventory_dict) + self.update_single(package_name, base_url, inventory_dict) await ctx.send(f"Added the package `{package_name}` to the database and refreshed the inventory.") @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) -- cgit v1.2.3 From a74d7f81f258b4e70221c445b351fe646d385dd5 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 12:57:36 +0100 Subject: Correct return type annotation --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index f008f2c28..ac74e7997 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -124,7 +124,7 @@ class DocCog(commands.Cog): api_package_name: str, base_url: str, inventory_url: str - ) -> Optional[INVENTORY_DICT]: + ) -> None: """ Update the cog's inventory, or reschedule this method to execute again if the remote inventory unreachable. 
-- cgit v1.2.3 From f3323503ff84b67ae2b8d4412001238937b7f684 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 21:28:21 +0100 Subject: Use different task ids for every inventory reschedule attempts The scheduler can't keep track of multiple tasks with the same id, and rescheduling the update task using the same id within an already scheduled update task caused the new task to get ignored as the old task only got deleted from the scheduler after it was finished --- bot/exts/info/doc/_cog.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ac74e7997..43407d5ba 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -66,7 +66,7 @@ class DocCog(commands.Cog): self.renamed_symbols = defaultdict(list) self.inventory_scheduler = Scheduler(self.__class__.__name__) - self.scheduled_inventories = set() + self.inventory_reschedule_attempts = defaultdict(int) self.refresh_event = asyncio.Event() self.refresh_event.set() @@ -134,20 +134,20 @@ class DocCog(commands.Cog): package = await fetch_inventory(inventory_url) if not package: - if inventory_url not in self.scheduled_inventories: + attempt = self.inventory_reschedule_attempts[package] + self.inventory_reschedule_attempts[package] += 1 + if attempt == 0: delay = FETCH_RESCHEDULE_DELAY.first else: delay = FETCH_RESCHEDULE_DELAY.repeated log.info(f"Failed to fetch inventory; attempting again in {delay} minutes.") self.inventory_scheduler.schedule_later( delay*60, - api_package_name, + (attempt, api_package_name), self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url) ) - self.scheduled_inventories.add(api_package_name) return - self.scheduled_inventories.discard(api_package_name) self.update_single(api_package_name, base_url, package) def ensure_unique_symbol_name( @@ -209,6 +209,7 @@ class DocCog(commands.Cog): self.refresh_event.clear() 
log.debug("Refreshing documentation inventory...") self.inventory_scheduler.cancel_all() + self.inventory_reschedule_attempts.clear() # Clear the old base URLS and doc symbols to ensure # that we start from a fresh local dataset. @@ -216,7 +217,6 @@ self.base_urls.clear() self.doc_symbols.clear() self.renamed_symbols.clear() - self.scheduled_inventories.clear() await self.item_fetcher.clear() # Run all coroutines concurrently - since each of them performs an HTTP -- cgit v1.2.3 From 93ef70f7bcbb638fbdf55fb278cf16c2605db63b Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 21:30:18 +0100 Subject: Simplify control flow Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 43407d5ba..eea380fc0 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -146,9 +146,8 @@ class DocCog(commands.Cog): (attempt, api_package_name), self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url) ) - return - - self.update_single(api_package_name, base_url, package) + else: + self.update_single(api_package_name, base_url, package) def ensure_unique_symbol_name( self, -- cgit v1.2.3 From e7b20b90efb50169aecf865168840a319037c776 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 21:30:40 +0100 Subject: Keep track of the init task and cancel it when the cog is unloaded --- bot/exts/info/doc/_cog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index eea380fc0..aa9642016 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -70,7 +70,7 @@ class DocCog(commands.Cog): self.refresh_event = asyncio.Event() self.refresh_event.set() -
self.init_refresh_task = self.bot.loop.create_task(self.init_refresh_inventory()) @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def init_refresh_inventory(self) -> None: @@ -415,4 +415,5 @@ class DocCog(commands.Cog): """Clear scheduled inventories, queued symbols and cleanup task on cog unload.""" self.inventory_scheduler.cancel_all() self.item_fetcher.cleanup_futures_task.cancel() + self.init_refresh_task.cancel() asyncio.create_task(self.item_fetcher.clear()) -- cgit v1.2.3 From a4de9fe294b7626dc81ee191d2d6bce751ad91c7 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 21:31:55 +0100 Subject: Change typehint name casing to PascalCase --- bot/converters.py | 2 +- bot/exts/info/doc/_cog.py | 4 ++-- bot/exts/info/doc/_inventory_parser.py | 11 ++++++----- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index 6bbc22c3a..2b383636c 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -187,7 +187,7 @@ class Inventory(Converter): """ @staticmethod - async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.INVENTORY_DICT]: + async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.InventoryDict]: """Convert url to Intersphinx inventory URL.""" await ctx.trigger_typing() if (inventory := await _inventory_parser.fetch_inventory(url)) is None: diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index aa9642016..51283a67e 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -21,7 +21,7 @@ from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler from . 
import PRIORITY_PACKAGES, doc_cache from ._batch_parser import BatchParser -from ._inventory_parser import INVENTORY_DICT, fetch_inventory +from ._inventory_parser import InventoryDict, fetch_inventory log = logging.getLogger(__name__) @@ -78,7 +78,7 @@ class DocCog(commands.Cog): await self.bot.wait_until_guild_available() await self.refresh_inventory() - def update_single(self, api_package_name: str, base_url: str, package: INVENTORY_DICT) -> None: + def update_single(self, api_package_name: str, base_url: str, package: InventoryDict) -> None: """ Rebuild the inventory for a single package. diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py index 886708867..1615f15bd 100644 --- a/bot/exts/info/doc/_inventory_parser.py +++ b/bot/exts/info/doc/_inventory_parser.py @@ -11,9 +11,10 @@ import bot log = logging.getLogger(__name__) FAILED_REQUEST_ATTEMPTS = 3 -INVENTORY_DICT = DefaultDict[str, List[Tuple[str, str]]] _V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)') +InventoryDict = DefaultDict[str, List[Tuple[str, str]]] + class ZlibStreamReader: """Class used for decoding zlib data of a stream line by line.""" @@ -43,7 +44,7 @@ class ZlibStreamReader: pos = buf.find(b'\n') -async def _load_v1(stream: aiohttp.StreamReader) -> INVENTORY_DICT: +async def _load_v1(stream: aiohttp.StreamReader) -> InventoryDict: invdata = defaultdict(list) async for line in stream: @@ -59,7 +60,7 @@ async def _load_v1(stream: aiohttp.StreamReader) -> INVENTORY_DICT: return invdata -async def _load_v2(stream: aiohttp.StreamReader) -> INVENTORY_DICT: +async def _load_v2(stream: aiohttp.StreamReader) -> InventoryDict: invdata = defaultdict(list) async for line in ZlibStreamReader(stream): @@ -72,7 +73,7 @@ async def _load_v2(stream: aiohttp.StreamReader) -> INVENTORY_DICT: return invdata -async def _fetch_inventory(url: str) -> INVENTORY_DICT: +async def _fetch_inventory(url: str) -> InventoryDict: """Fetch, parse and return 
an intersphinx inventory file from an url.""" timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5) async with bot.instance.http_session.get(url, timeout=timeout, raise_for_status=True) as response: @@ -94,7 +95,7 @@ async def _fetch_inventory(url: str) -> INVENTORY_DICT: raise ValueError(f"Invalid inventory file at url {url}.") -async def fetch_inventory(url: str) -> Optional[INVENTORY_DICT]: +async def fetch_inventory(url: str) -> Optional[InventoryDict]: """ Get an inventory dict from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors. -- cgit v1.2.3 From d972b7800346b4d1ee88c706354bb1c18ba4b725 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 21:33:56 +0100 Subject: Reuse the redis key instead of creating a new string for the expires set --- bot/exts/info/doc/_redis_cache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index 52cb2bc94..cab51c3f1 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -27,8 +27,8 @@ class DocRedisCache(RedisObject): needs_expire = False with await self._get_pool_connection() as connection: - if item.package+url_key not in self._set_expires: - self._set_expires.add(item.package+url_key) + if redis_key not in self._set_expires: + self._set_expires.add(redis_key) needs_expire = not await connection.exists(redis_key) await connection.hset(redis_key, item.symbol_id, value) -- cgit v1.2.3 From 7342510667ea159fcc83927cb9caee14661c12a8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 13 Jan 2021 23:18:33 +0100 Subject: Set the user_requested attribute at the start of the coroutine A context switch may occur when we're waiting for the web page response, during which a clear could be triggered. 
If the event is not set before that we could end up with the dictionary changing sizes, or if a copy was made, a future that'd never finish as it'd be cleared from the queue and the futures dict --- bot/exts/info/doc/_batch_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index ebae6efb8..4a6d9b544 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -108,6 +108,7 @@ class BatchParser: Not safe to run while `self.clear` is running. """ + self._item_futures[doc_item].user_requested = True if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: async with bot.instance.http_session.get(doc_item.url) as response: soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") @@ -123,7 +124,6 @@ class BatchParser: with suppress(ValueError): # If the item is not in the list then the item is already parsed or is being parsed self._move_to_front(doc_item) - self._item_futures[doc_item].user_requested = True return await self._item_futures[doc_item] async def _parse_queue(self) -> None: -- cgit v1.2.3 From 1bdfdac30d27d67d95c49b5b66a0a4de919afa21 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 14 Jan 2021 01:10:48 +0100 Subject: Ensure footer is actually max 100 chars Shortening the renamed symbols string to 100 chars is not accurate as the footer also contains a string before that, subtracting its length fixes this. --- bot/exts/info/doc/_cog.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 51283a67e..942d685af 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -262,10 +262,11 @@ class DocCog(commands.Cog): url=f"{symbol_info.url}#{symbol_info.symbol_id}", description=markdown ) - # Show all symbols with the same name that were renamed in the footer. 
+ # Show all symbols with the same name that were renamed in the footer, + # with a max of 100 chars. if symbol in self.renamed_symbols: renamed_symbols = ', '.join(self.renamed_symbols[symbol]) - footer_text = f"Moved: {textwrap.shorten(renamed_symbols, 100, placeholder=' ...')}" + footer_text = f"Moved: {textwrap.shorten(renamed_symbols, 100-7, placeholder=' ...')}" else: footer_text = "" embed.set_footer(text=footer_text) -- cgit v1.2.3 From e86e9f921a4bbbe42a5fb6fd8486425f11af62cf Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 14 Jan 2021 05:00:22 +0100 Subject: Raise an error or log a warning if there's a global name conflict When wrapper uses a global name, which conflicts with a global name from wrapped's module that wrapped uses for its annotations, we run into a situation that can't be solved without changing one of the names, so an error is raised to give this clearer meaning. The check may be erroneous in some edge cases or the objects the conflicting names refer to can be functionally identical, so the error can be turned into a logged warning. --- bot/utils/function.py | 40 +++++++++++++++++++++++++++++++++++----- 1 file changed, 35 insertions(+), 5 deletions(-) diff --git a/bot/utils/function.py b/bot/utils/function.py index ab7f45761..4fa7a9f60 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -2,15 +2,22 @@ import functools import inspect +import logging import types import typing as t +log = logging.getLogger(__name__) + Argument = t.Union[int, str] BoundArgs = t.OrderedDict[str, t.Any] Decorator = t.Callable[[t.Callable], t.Callable] ArgValGetter = t.Callable[[BoundArgs], t.Any] +class GlobalNameConflictError(Exception): + """Raised when there's a conflict between the globals used to resolve annotations of wrapped and its wrapper.""" + + def get_arg_value(name_or_pos: Argument, arguments: BoundArgs) -> t.Any: """ Return a value from `arguments` based on a name or position. 
@@ -77,7 +84,12 @@ def get_bound_args(func: t.Callable, args: t.Tuple, kwargs: t.Dict[str, t.Any]) return bound_args.arguments -def update_wrapper_globals(wrapper: types.FunctionType, wrapped: types.FunctionType) -> types.FunctionType: +def update_wrapper_globals( + wrapper: types.FunctionType, + wrapped: types.FunctionType, + *, + error_on_conflict: bool = True, +) -> types.FunctionType: """ Update globals of `wrapper` with the globals from `wrapped`. @@ -88,10 +100,26 @@ def update_wrapper_globals(wrapper: types.FunctionType, wrapped: types.FunctionT This function creates a new function functionally identical to `wrapper`, which has the globals replaced with a merge of `wrapped`s globals and the `wrapper`s globals. - In case a global name from `wrapped` conflicts with a name from `wrapper`'s globals, `wrapper` will win - to keep it functional, but this may cause problems if the name is used as an annotation and - discord.py uses it as a converter on a parameter from `wrapped`. + If `error_on_conflict` is True, an exception will be raised in case `wrapper` and `wrapped` share a global name + that is used by `wrapped`'s typehints, as this can cause incorrect objects being used by discordpy's converters. + The error can be turned into a warning by setting the argument to False. """ + forwardrefs = (ann for ann in wrapped.__annotations__.values() if isinstance(ann, str)) + annotation_global_names = (ann.split(".", maxsplit=1)[0] for ann in forwardrefs) + # Conflicting globals from both functions' modules that are also used in the wrapper and in wrapped's annotations. + shared_globals = set(wrapper.__code__.co_names) & set(annotation_global_names) + shared_globals &= set(wrapped.__globals__) & set(wrapper.__globals__) + if shared_globals: + message = ( + f"wrapper and the wrapped function share the following " + f"global names used by annotations: {', '.join(shared_globals)}. 
" + f"Resolve the conflicts or pass error_on_conflict=False to suppress this error if this is intentional." + ) + if error_on_conflict: + raise GlobalNameConflictError(message) + else: + log.info(message) + new_globals = wrapper.__globals__.copy() new_globals.update((k, v) for k, v in wrapped.__globals__.items() if k not in wrapper.__code__.co_names) return types.FunctionType( @@ -107,11 +135,13 @@ def command_wraps( wrapped: types.FunctionType, assigned: t.Sequence[str] = functools.WRAPPER_ASSIGNMENTS, updated: t.Sequence[str] = functools.WRAPPER_UPDATES, + *, + error_on_conflict: bool = True, ) -> t.Callable[[types.FunctionType], types.FunctionType]: """Update the decorated function to look like `wrapped` and update globals for discordpy forwardref evaluation.""" def decorator(wrapper: types.FunctionType) -> types.FunctionType: return functools.update_wrapper( - update_wrapper_globals(wrapper, wrapped), wrapped, assigned, updated + update_wrapper_globals(wrapper, wrapped, error_on_conflict=error_on_conflict), wrapped, assigned, updated ) return decorator -- cgit v1.2.3 From b1250515e7d6d3545bcfd850c6286c69239cb420 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 14 Jan 2021 05:17:07 +0100 Subject: Prevent an inventory refresh while waiting for item cache If an inventory refresh was started while the symbol embed coroutine was suspended, it could cause the parser to try to fetch a non existent future if the markdown was requested after it was cleared but before new inventories were loaded in. 
--- bot/exts/info/doc/_cog.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 942d685af..7b9dad135 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -70,6 +70,9 @@ class DocCog(commands.Cog): self.refresh_event = asyncio.Event() self.refresh_event.set() + self.symbol_get_event = asyncio.Event() + self.symbol_get_event.set() + self.init_refresh_task = self.bot.loop.create_task(self.init_refresh_inventory()) @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) @@ -206,6 +209,7 @@ class DocCog(commands.Cog): async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" self.refresh_event.clear() + await self.symbol_get_event.wait() log.debug("Refreshing documentation inventory...") self.inventory_scheduler.cancel_all() self.inventory_reschedule_attempts.clear() @@ -248,7 +252,10 @@ class DocCog(commands.Cog): return None self.bot.stats.incr(f"doc_fetches.{symbol_info.package}") + self.symbol_get_event.clear() markdown = await doc_cache.get(symbol_info) + self.symbol_get_event.set() + if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") markdown = await self.item_fetcher.get_markdown(symbol_info) -- cgit v1.2.3 From f1103aeade13f964282154d5d1597b81188ce98f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 15 Jan 2021 23:11:57 +0100 Subject: Use a defaultdict for item futures To be able to set the attribute at the start of the coro we need to be able to access the item's future before we know about all the other items. 
This also saves us from having to add them all as the queue parser or get_markdown will create the futures for us dynamically --- bot/exts/info/doc/_batch_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 4a6d9b544..606c5d803 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -92,7 +92,7 @@ class BatchParser: def __init__(self): self._queue: List[QueueItem] = [] self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) - self._item_futures: Dict[DocItem, ParseResultFuture] = {} + self._item_futures: Dict[DocItem, ParseResultFuture] = defaultdict(ParseResultFuture) self._parse_task = None self.cleanup_futures_task = bot.instance.loop.create_task(self._cleanup_futures()) @@ -114,7 +114,6 @@ class BatchParser: soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) - self._item_futures.update((symbol, ParseResultFuture()) for symbol in symbols_to_queue) del self._page_symbols[doc_item.url] log.debug(f"Added symbols from {doc_item.url} to parse queue.") @@ -168,6 +167,7 @@ class BatchParser: queue_item = self._queue.pop(item_index) self._queue.append(queue_item) + log.trace(f"Moved {item} to the front of the queue.") def add_item(self, doc_item: DocItem) -> None: """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" -- cgit v1.2.3 From 318a0f6c5e597c61833984cd608359c8b4e5ddf0 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 19 Jan 2021 21:00:34 +0200 Subject: Better GitHub regex --- bot/exts/info/code_snippets.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 3469b88f4..84f606036 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -13,27 +13,27 @@ from 
bot.utils.messages import wait_for_deletion log = logging.getLogger(__name__) GITHUB_RE = re.compile( - r'https://github\.com/(?P\S+?)/blob/(?P\S+/[^\s#,>]+)' - r'(#L(?P\d+)([-~:]L(?P\d+))?)?($|\s|,|>)' + r'https://github\.com/(?P[a-zA-Z0-9-]+/[\w.-]+)/blob/' + r'(?P[^#>]+/{0,1})(#L(?P\d+)([-~:]L(?P\d+))?)' ) GITHUB_GIST_RE = re.compile( r'https://gist\.github\.com/([^/]+)/(?P[^\W_]+)/*' r'(?P[^\W_]*)/*#file-(?P\S+?)' - r'(-L(?P\d+)([-~:]L(?P\d+))?)?($|\s|,|>)' + r'(-L(?P\d+)([-~:]L(?P\d+))?)' ) GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} GITLAB_RE = re.compile( r'https://gitlab\.com/(?P\S+?)/\-/blob/(?P\S+/[^\s#,>]+)' - r'(#L(?P\d+)([-](?P\d+))?)?($|\s|,|>)' + r'(#L(?P\d+)([-](?P\d+))?)' ) BITBUCKET_RE = re.compile( r'https://bitbucket\.org/(?P\S+?)/src/' r'(?P\S+?)/(?P[^\s#,>]+)' - r'(#lines-(?P\d+)(:(?P\d+))?)?($|\s|,|>)' + r'(#lines-(?P\d+)(:(?P\d+))?)' ) -- cgit v1.2.3 From e9f48d83d482502a846dd8d37cee6ab4c01fdf7e Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Tue, 19 Jan 2021 21:14:19 +0200 Subject: Account for query params in bitbucket --- bot/exts/info/code_snippets.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 84f606036..75d8ac290 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -18,22 +18,21 @@ GITHUB_RE = re.compile( ) GITHUB_GIST_RE = re.compile( - r'https://gist\.github\.com/([^/]+)/(?P[^\W_]+)/*' - r'(?P[^\W_]*)/*#file-(?P\S+?)' + r'https://gist\.github\.com/([^/]+)/(?P[a-zA-Z0-9]+)/*' + r'(?P[a-zA-Z0-9-]*)/*#file-(?P[^#>]+?)' r'(-L(?P\d+)([-~:]L(?P\d+))?)' ) GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} GITLAB_RE = re.compile( - r'https://gitlab\.com/(?P\S+?)/\-/blob/(?P\S+/[^\s#,>]+)' - r'(#L(?P\d+)([-](?P\d+))?)' + r'https://gitlab\.com/(?P[a-zA-Z0-9-]+?)/\-/blob/(?P[^#>]+/{0,1})' + r'(#L(?P\d+)(-(?P\d+))?)' ) BITBUCKET_RE = re.compile( - r'https://bitbucket\.org/(?P\S+?)/src/' - 
r'(?P\S+?)/(?P[^\s#,>]+)' - r'(#lines-(?P\d+)(:(?P\d+))?)' + r'https://bitbucket\.org/(?P[a-zA-Z0-9-]+/[\w.-]+?)/src/(?P[0-9a-zA-Z]+?)' + r'/(?P[^#>]+?)(\?[^#>]+)?(#lines-(?P\d+)(:(?P\d+))?)' ) -- cgit v1.2.3 From 69c98d95b436063684d5d004aead85ba3b9514ef Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 21 Jan 2021 03:51:32 +0100 Subject: Use inspect.unwrap instead of manually unwrapping --- bot/exts/info/source.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py index ae68ef7e8..f03b6a46f 100644 --- a/bot/exts/info/source.py +++ b/bot/exts/info/source.py @@ -68,9 +68,7 @@ class BotSource(commands.Cog): Raise BadArgument if `source_item` is a dynamically-created object (e.g. via internal eval). """ if isinstance(source_item, commands.Command): - source_item = source_item.callback - while hasattr(source_item, "__wrapped__"): - source_item = source_item.__wrapped__ + source_item = inspect.unwrap(source_item.callback) src = source_item.__code__ filename = src.co_filename elif isinstance(source_item, str): -- cgit v1.2.3 From 72a805c779b79ef5c0aeed7a9dd4b2096e3b35c9 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 22 Jan 2021 11:35:00 +0100 Subject: Fix issues with multiple gets being suspended when a refresh starts With a normal event, if multiple gets were suspended and a refresh started, we'd continue the refresh after the first get finished and set the event which would be the same behaviour as the one it tried to fix. 
This is avoided by using a counter that's incremented every time a context manager is entered around an event and only setting the event when that counter reaches a zero after everything exited the context mgr --- bot/exts/info/doc/_cog.py | 10 ++++------ bot/utils/lock.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 7b9dad135..26694ae55 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -16,7 +16,7 @@ from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import Inventory, PackageName, ValidURL from bot.pagination import LinePaginator -from bot.utils.lock import lock +from bot.utils.lock import SharedEvent, lock from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler from . import PRIORITY_PACKAGES, doc_cache @@ -70,8 +70,7 @@ class DocCog(commands.Cog): self.refresh_event = asyncio.Event() self.refresh_event.set() - self.symbol_get_event = asyncio.Event() - self.symbol_get_event.set() + self.symbol_get_event = SharedEvent() self.init_refresh_task = self.bot.loop.create_task(self.init_refresh_inventory()) @@ -252,9 +251,8 @@ class DocCog(commands.Cog): return None self.bot.stats.incr(f"doc_fetches.{symbol_info.package}") - self.symbol_get_event.clear() - markdown = await doc_cache.get(symbol_info) - self.symbol_get_event.set() + with self.symbol_get_event: + markdown = await doc_cache.get(symbol_info) if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol}`.") diff --git a/bot/utils/lock.py b/bot/utils/lock.py index 997c653a1..b4bb0ebc7 100644 --- a/bot/utils/lock.py +++ b/bot/utils/lock.py @@ -1,3 +1,4 @@ +import asyncio import inspect import logging import types @@ -18,6 +19,35 @@ _IdCallable = Callable[[function.BoundArgs], _IdCallableReturn] ResourceId = Union[Hashable, _IdCallable] +class SharedEvent: + """ + 
Context manager managing an internal event exposed through the wait coro. + + While any code is executing in this context manager, the underlying event will not be set; + when all of the holders finish the event will be set. + """ + + def __init__(self): + self._active_count = 0 + self._event = asyncio.Event() + self._event.set() + + def __enter__(self): + """Increment the count of the active holders and clear the internal event.""" + self._active_count += 1 + self._event.clear() + + def __exit__(self, _exc_type, _exc_val, _exc_tb): # noqa: ANN001 + """Decrement the count of the active holders; if 0 is reached set the internal event.""" + self._active_count -= 1 + if not self._active_count: + self._event.set() + + async def wait(self) -> None: + """Wait for all active holders to exit.""" + await self._event.wait() + + class LockGuard: """ A context manager which acquires and releases a lock (mutex). -- cgit v1.2.3 From 59ca1cbed6bcf234b9eb277da291bdaeb259e939 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 23 Jan 2021 04:36:26 +0100 Subject: Properly handle cache being cleared Previously the code deleted the entry of all of the DocItems of the page after its contents were requested once, but this caused problems when the cache was cleared when it expired. Instead of deleting the entry to check if it should be queued on the next item request, we keep it and create an entry in the _item_futures dict for all items again and check for containment there. 
To avoid populating the queue multiple times with the same item in some cases the futures cleanup task will now only run when the queue is empty --- bot/exts/info/doc/_batch_parser.py | 25 +++++++++++++++---------- bot/exts/info/doc/_cog.py | 4 +--- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 606c5d803..42d81e98c 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -14,6 +14,7 @@ from bs4 import BeautifulSoup import bot from bot.constants import Channels +from bot.utils.lock import lock_arg from . import doc_cache from ._parsing import get_symbol_markdown if TYPE_CHECKING: @@ -92,13 +93,14 @@ class BatchParser: def __init__(self): self._queue: List[QueueItem] = [] self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) - self._item_futures: Dict[DocItem, ParseResultFuture] = defaultdict(ParseResultFuture) + self._item_futures: Dict[DocItem, ParseResultFuture] = {} self._parse_task = None self.cleanup_futures_task = bot.instance.loop.create_task(self._cleanup_futures()) self.stale_inventory_notifier = StaleInventoryNotifier() + @lock_arg("doc.get_markdown", "doc_item", attrgetter("url"), wait=True) async def get_markdown(self, doc_item: DocItem) -> str: """ Get the result Markdown of `doc_item`. @@ -108,18 +110,20 @@ class BatchParser: Not safe to run while `self.clear` is running. 
""" - self._item_futures[doc_item].user_requested = True - if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: + if doc_item not in self._item_futures: + self._item_futures.update((symbol, ParseResultFuture()) for symbol in self._page_symbols[doc_item.url]) + self._item_futures[doc_item].user_requested = True + async with bot.instance.http_session.get(doc_item.url) as response: soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") - self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) - del self._page_symbols[doc_item.url] + self._queue.extend(QueueItem(symbol, soup) for symbol in self._page_symbols[doc_item.url]) log.debug(f"Added symbols from {doc_item.url} to parse queue.") if self._parse_task is None: self._parse_task = asyncio.create_task(self._parse_queue()) - + else: + self._item_futures[doc_item].user_requested = True with suppress(ValueError): # If the item is not in the list then the item is already parsed or is being parsed self._move_to_front(doc_item) @@ -196,8 +200,9 @@ class BatchParser: Keeping them around for longer than a second is unnecessary and keeps the parsed Markdown strings alive. 
""" while True: - current_time = time.time() - for key, future in self._item_futures.copy().items(): - if current_time - future.result_set_time > 5: - del self._item_futures[key] + if not self._queue: + current_time = time.time() + for key, future in self._item_futures.copy().items(): + if current_time - future.result_set_time > 5: + del self._item_futures[key] await asyncio.sleep(5) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 26694ae55..c3458d776 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -408,12 +408,10 @@ class DocCog(commands.Cog): @docs_group.command(name="cleardoccache") @commands.has_any_role(*MODERATION_ROLES) - @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) async def clear_cache_command(self, ctx: commands.Context, package_name: PackageName) -> None: """Clear the persistent redis cache for `package`.""" if await doc_cache.delete(package_name): - await self.refresh_inventory() - await ctx.send(f"Successfully cleared the cache for `{package_name}` and refreshed the inventories.") + await ctx.send(f"Successfully cleared the cache for `{package_name}`.") else: await ctx.send("No keys matching the package found.") -- cgit v1.2.3 From c2e3d6fac2ac615dea230671068790d8c9df71ba Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 23 Jan 2021 04:45:44 +0100 Subject: Create a namespace var --- bot/exts/info/doc/__init__.py | 3 ++- bot/exts/info/doc/_batch_parser.py | 4 ++-- bot/exts/info/doc/_cog.py | 10 +++++----- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py index 2bb43a950..38a8975c0 100644 --- a/bot/exts/info/doc/__init__.py +++ b/bot/exts/info/doc/__init__.py @@ -5,8 +5,9 @@ MAX_SIGNATURE_AMOUNT = 3 PRIORITY_PACKAGES = ( "python", ) +NAMESPACE = "doc" -doc_cache = DocRedisCache(namespace="Docs") +doc_cache = DocRedisCache(namespace=NAMESPACE) def setup(bot: Bot) -> None: diff 
--git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 42d81e98c..872f08ea9 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -15,7 +15,7 @@ from bs4 import BeautifulSoup import bot from bot.constants import Channels from bot.utils.lock import lock_arg -from . import doc_cache +from . import NAMESPACE, doc_cache from ._parsing import get_symbol_markdown if TYPE_CHECKING: from ._cog import DocItem @@ -100,7 +100,7 @@ class BatchParser: self.stale_inventory_notifier = StaleInventoryNotifier() - @lock_arg("doc.get_markdown", "doc_item", attrgetter("url"), wait=True) + @lock_arg(NAMESPACE, "doc_item", attrgetter("url"), wait=True) async def get_markdown(self, doc_item: DocItem) -> str: """ Get the result Markdown of `doc_item`. diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index c3458d776..430e8ebcb 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -19,7 +19,7 @@ from bot.pagination import LinePaginator from bot.utils.lock import SharedEvent, lock from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler -from . import PRIORITY_PACKAGES, doc_cache +from . 
import NAMESPACE, PRIORITY_PACKAGES, doc_cache from ._batch_parser import BatchParser from ._inventory_parser import InventoryDict, fetch_inventory @@ -74,7 +74,7 @@ class DocCog(commands.Cog): self.init_refresh_task = self.bot.loop.create_task(self.init_refresh_inventory()) - @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) + @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) async def init_refresh_inventory(self) -> None: """Refresh documentation inventory on cog initialization.""" await self.bot.wait_until_guild_available() @@ -330,7 +330,7 @@ class DocCog(commands.Cog): @docs_group.command(name='setdoc', aliases=('s',)) @commands.has_any_role(*MODERATION_ROLES) - @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) + @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) async def set_command( self, ctx: commands.Context, @@ -367,7 +367,7 @@ class DocCog(commands.Cog): @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @commands.has_any_role(*MODERATION_ROLES) - @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) + @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None: """ Removes the specified package from the database. 
@@ -386,7 +386,7 @@ class DocCog(commands.Cog): @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) @commands.has_any_role(*MODERATION_ROLES) - @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) + @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) async def refresh_command(self, ctx: commands.Context) -> None: """Refresh inventories and show the difference.""" old_inventories = set(self.base_urls) -- cgit v1.2.3 From 1b9aee6239aef1b0a3ce016145c1212e892f7d22 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 23 Jan 2021 05:50:34 +0100 Subject: lock markdownify version to 0.6.1 the 0.6.3 update brought a change that fails to ignore newlines in the html, introducing unnecessary lines into the output --- Pipfile | 2 +- Pipfile.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Pipfile b/Pipfile index 54bd46ce8..2e76d2ede 100644 --- a/Pipfile +++ b/Pipfile @@ -18,7 +18,7 @@ deepdiff = "~=4.0" feedparser = "~=5.2" fuzzywuzzy = "~=0.17" lxml = "~=4.4" -markdownify = "~=0.6.1" +markdownify = "==0.6.1" more_itertools = "~=8.2" python-dateutil = "~=2.8" pyyaml = "~=5.1" diff --git a/Pipfile.lock b/Pipfile.lock index 5aff33383..ec88e5530 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "33874d325a918682da3ae4d833748263695836d0cda4c1b0627ce5a5f29746e5" + "sha256": "cd61b7be63278d2f5b073e98c507aa50affe97e590bb25e37c521754e65bc110" }, "pipfile-spec": 6, "requires": { @@ -376,11 +376,11 @@ }, "markdownify": { "hashes": [ - "sha256:2147197d9c45cdd24d57302b94e01cac44988862960ac42eba730345a31aebbc", - "sha256:3de08764db001e7119cb06481de4ec0b2ea0338fd26cf49bdf16c4475ef44b81" + "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d", + "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc" ], "index": "pypi", - "version": "==0.6.3" + "version": "==0.6.1" }, "more-itertools": { "hashes": [ -- cgit v1.2.3 From 
f0b468d9c22eea43e36cd14960c23cb2c30cb335 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 23 Jan 2021 06:08:01 +0100 Subject: Avoid errors when the first element is longer than the truncation index --- bot/exts/info/doc/_parsing.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 45a81a4cb..0251b0105 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -195,9 +195,16 @@ def _get_truncated_description( return result # Determine the actual truncation index. - # Truncate at the last Markdown element that comes before the truncation index. - markdown_truncate_index = max(cut for cut in markdown_element_ends if cut < truncate_index) - return result[:markdown_truncate_index].strip(_TRUNCATE_STRIP_CHARACTERS) + "..." + possible_truncation_indices = [cut for cut in markdown_element_ends if cut < truncate_index] + if not possible_truncation_indices: + # In case there is no Markdown element ending before the truncation index, use shorten as a fallback. + truncated_result = textwrap.shorten(result, truncate_index) + else: + # Truncate at the last Markdown element that comes before the truncation index. + markdown_truncate_index = max(possible_truncation_indices) + truncated_result = result[:markdown_truncate_index] + + return truncated_result.strip(_TRUNCATE_STRIP_CHARACTERS) + "..." 
def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str: -- cgit v1.2.3 From f91c1595c80fe68a4e9261ce5277f8d2e94ccfa2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 23 Jan 2021 06:17:41 +0100 Subject: Wrap whole string in shorten This helps avoid subtracting the length of "Moved: " from the shorten index Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 430e8ebcb..a074d8daa 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -271,7 +271,7 @@ class DocCog(commands.Cog): # with a max of 100 chars. if symbol in self.renamed_symbols: renamed_symbols = ', '.join(self.renamed_symbols[symbol]) - footer_text = f"Moved: {textwrap.shorten(renamed_symbols, 100-7, placeholder=' ...')}" + footer_text = textwrap.shorten("Moved: " + renamed_symbols, 100, placeholder=' ...') else: footer_text = "" embed.set_footer(text=footer_text) -- cgit v1.2.3 From 9695d7d8022729efe8fab36eb7ef854aeece8163 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 23 Jan 2021 06:38:17 +0100 Subject: Cancel current task inside coro to not keep track of attempts manually The scheduler shields the coroutine from cancellation so we can cancel the scheduler's tasks inside of it to avoid the error from multiple tasks with the same id trying to be scheduled which the manual tracking of attempts solved Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index a074d8daa..df076f162 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -66,7 +66,6 @@ class DocCog(commands.Cog): self.renamed_symbols = defaultdict(list) self.inventory_scheduler = 
Scheduler(self.__class__.__name__) - self.inventory_reschedule_attempts = defaultdict(int) self.refresh_event = asyncio.Event() self.refresh_event.set() @@ -136,16 +135,15 @@ class DocCog(commands.Cog): package = await fetch_inventory(inventory_url) if not package: - attempt = self.inventory_reschedule_attempts[package] - self.inventory_reschedule_attempts[package] += 1 - if attempt == 0: - delay = FETCH_RESCHEDULE_DELAY.first - else: + if api_package_name in self.inventory_scheduler: + self.inventory_scheduler.cancel(api_package_name) delay = FETCH_RESCHEDULE_DELAY.repeated + else: + delay = FETCH_RESCHEDULE_DELAY.first log.info(f"Failed to fetch inventory; attempting again in {delay} minutes.") self.inventory_scheduler.schedule_later( delay*60, - (attempt, api_package_name), + api_package_name, self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url) ) else: @@ -211,7 +209,6 @@ class DocCog(commands.Cog): await self.symbol_get_event.wait() log.debug("Refreshing documentation inventory...") self.inventory_scheduler.cancel_all() - self.inventory_reschedule_attempts.clear() # Clear the old base URLS and doc symbols to ensure # that we start from a fresh local dataset. 
-- cgit v1.2.3 From 87facef69acfaa1d8b69b5a03bfabc9582aa1ace Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Sun, 24 Jan 2021 19:57:26 +0200 Subject: More restrictive GitHub gist regex for usernames --- bot/exts/info/code_snippets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 75d8ac290..e1b2079d0 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -18,7 +18,7 @@ GITHUB_RE = re.compile( ) GITHUB_GIST_RE = re.compile( - r'https://gist\.github\.com/([^/]+)/(?P[a-zA-Z0-9]+)/*' + r'https://gist\.github\.com/([a-zA-Z0-9-]+)/(?P[a-zA-Z0-9]+)/*' r'(?P[a-zA-Z0-9-]*)/*#file-(?P[^#>]+?)' r'(-L(?P\d+)([-~:]L(?P\d+))?)' ) -- cgit v1.2.3 From 69a87371aeaf815cea71d5b44a7b6a824f7fa5ed Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Sun, 24 Jan 2021 19:58:36 +0200 Subject: Don't match dashes in GitHub gist revisions Gist revisions don't allow dashes oops --- bot/exts/info/code_snippets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index e1b2079d0..44f11cdbd 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -19,7 +19,7 @@ GITHUB_RE = re.compile( GITHUB_GIST_RE = re.compile( r'https://gist\.github\.com/([a-zA-Z0-9-]+)/(?P[a-zA-Z0-9]+)/*' - r'(?P[a-zA-Z0-9-]*)/*#file-(?P[^#>]+?)' + r'(?P[a-zA-Z0-9]*)/*#file-(?P[^#>]+?)' r'(-L(?P\d+)([-~:]L(?P\d+))?)' ) -- cgit v1.2.3 From ae5e1c64983431e1bcac1fc9a50255fdc32777ee Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Sun, 24 Jan 2021 19:59:49 +0200 Subject: Add matching for query params to all the regexes --- bot/exts/info/code_snippets.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/code_snippets.py 
b/bot/exts/info/code_snippets.py index 44f11cdbd..3f943aea8 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -14,12 +14,12 @@ log = logging.getLogger(__name__) GITHUB_RE = re.compile( r'https://github\.com/(?P[a-zA-Z0-9-]+/[\w.-]+)/blob/' - r'(?P[^#>]+/{0,1})(#L(?P\d+)([-~:]L(?P\d+))?)' + r'(?P[^#>]+/{0,1})(\?[^#>]+)?(#L(?P\d+)([-~:]L(?P\d+))?)' ) GITHUB_GIST_RE = re.compile( r'https://gist\.github\.com/([a-zA-Z0-9-]+)/(?P[a-zA-Z0-9]+)/*' - r'(?P[a-zA-Z0-9]*)/*#file-(?P[^#>]+?)' + r'(?P[a-zA-Z0-9]*)/*#file-(?P[^#>]+?)(\?[^#>]+)?' r'(-L(?P\d+)([-~:]L(?P\d+))?)' ) @@ -27,7 +27,7 @@ GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} GITLAB_RE = re.compile( r'https://gitlab\.com/(?P[a-zA-Z0-9-]+?)/\-/blob/(?P[^#>]+/{0,1})' - r'(#L(?P\d+)(-(?P\d+))?)' + r'(\?[^#>]+)?(#L(?P\d+)(-(?P\d+))?)' ) BITBUCKET_RE = re.compile( -- cgit v1.2.3 From 64596679aeed67a0bfdb645ade5065af129c8c56 Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Sun, 24 Jan 2021 20:06:20 +0200 Subject: Match both username *and* repo in the GitLab regex --- bot/exts/info/code_snippets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 3f943aea8..e825ec513 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -26,7 +26,7 @@ GITHUB_GIST_RE = re.compile( GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} GITLAB_RE = re.compile( - r'https://gitlab\.com/(?P[a-zA-Z0-9-]+?)/\-/blob/(?P[^#>]+/{0,1})' + r'https://gitlab\.com/(?P[\w.-]+/[\w.-]+)/\-/blob/(?P[^#>]+/{0,1})' r'(\?[^#>]+)?(#L(?P\d+)(-(?P\d+))?)' ) -- cgit v1.2.3 From f23c2e78fb9ac6e6c2f7faeaeaf652c89ad8c263 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Tue, 26 Jan 2021 18:06:32 +0200 Subject: Make the cog update even if write to DB fails The defcon cog should be functional even if there is some issue with writing to 
the DB for some reason. The functionality should have retention across restarts, but it shouldn't be its failing point. If necessary, it should be able to work with no DB at all --- bot/exts/moderation/defcon.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 4b25c36df..00b108feb 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -157,13 +157,13 @@ class Defcon(Cog): } ) - self.days = timedelta(days=days) - self.update_notifier() - except Exception as err: log.exception("Unable to update DEFCON settings.") error = err finally: + self.days = timedelta(days=days) + self.update_notifier() + await ctx.send(self.build_defcon_msg(action, error)) await self.send_defcon_log(action, ctx.author, error) -- cgit v1.2.3 From f2c8e29f79c19cbef0d0477b668d30aca5efb099 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Tue, 26 Jan 2021 19:36:35 +0200 Subject: Moved self.enabled update to _defcon_action --- bot/exts/moderation/defcon.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 00b108feb..f34f8fa28 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -44,13 +44,11 @@ class Action(Enum): class Defcon(Cog): """Time-sensitive server defense mechanisms.""" - days = None # type: timedelta - enabled = False # type: bool - def __init__(self, bot: Bot): self.bot = bot self.channel = None self.days = timedelta(days=0) + self.enabled = False self.bot.loop.create_task(self.sync_settings()) @@ -142,6 +140,9 @@ class Defcon(Cog): except Exception: pass + self.days = timedelta(days=days) + self.enabled = action != Action.DISABLED + error = None try: await self.bot.api_client.put( @@ -150,8 +151,8 @@ class Defcon(Cog): 'name': 'defcon', 'data': { # TODO: retrieve old days count - 'days': days, - 'enabled': action is not 
Action.DISABLED, + 'days': self.days.days, + 'enabled': self.enabled, 'enable_date': datetime.now().isoformat() } } @@ -161,7 +162,6 @@ class Defcon(Cog): log.exception("Unable to update DEFCON settings.") error = err finally: - self.days = timedelta(days=days) self.update_notifier() await ctx.send(self.build_defcon_msg(action, error)) @@ -178,7 +178,6 @@ class Defcon(Cog): Currently, this just adds an account age requirement. Use !defcon days to set how old an account must be, in days. """ - self.enabled = True await self._defcon_action(ctx, days=0, action=Action.ENABLED) await self.update_channel_topic() @@ -186,7 +185,6 @@ class Defcon(Cog): @has_any_role(*MODERATION_ROLES) async def disable_command(self, ctx: Context) -> None: """Disable DEFCON mode. Useful in a pinch, but be sure you know what you're doing!""" - self.enabled = False await self._defcon_action(ctx, days=0, action=Action.DISABLED) await self.update_channel_topic() @@ -206,7 +204,6 @@ class Defcon(Cog): @has_any_role(*MODERATION_ROLES) async def days_command(self, ctx: Context, days: int) -> None: """Set how old an account must be to join the server, in days, with DEFCON mode enabled.""" - self.enabled = True await self._defcon_action(ctx, days=days, action=Action.UPDATED) await self.update_channel_topic() -- cgit v1.2.3 From 76574adda0e4a033b93b976278904d796ef055aa Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Tue, 26 Jan 2021 19:41:20 +0200 Subject: Moved channel topic change to _defcon_action --- bot/exts/moderation/defcon.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index f34f8fa28..1e88a8d9c 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -166,6 +166,7 @@ class Defcon(Cog): await ctx.send(self.build_defcon_msg(action, error)) await self.send_defcon_log(action, ctx.author, error) + await self.update_channel_topic() 
self.bot.stats.gauge("defcon.threshold", days) @@ -179,14 +180,12 @@ class Defcon(Cog): in days. """ await self._defcon_action(ctx, days=0, action=Action.ENABLED) - await self.update_channel_topic() @defcon_group.command(name='disable', aliases=('off', 'd'), root_aliases=("defoff",)) @has_any_role(*MODERATION_ROLES) async def disable_command(self, ctx: Context) -> None: """Disable DEFCON mode. Useful in a pinch, but be sure you know what you're doing!""" await self._defcon_action(ctx, days=0, action=Action.DISABLED) - await self.update_channel_topic() @defcon_group.command(name='status', aliases=('s',)) @has_any_role(*MODERATION_ROLES) @@ -205,7 +204,6 @@ class Defcon(Cog): async def days_command(self, ctx: Context, days: int) -> None: """Set how old an account must be to join the server, in days, with DEFCON mode enabled.""" await self._defcon_action(ctx, days=days, action=Action.UPDATED) - await self.update_channel_topic() async def update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" -- cgit v1.2.3 From e3949433fc87cd58f1e0645756bd0d8de60798ee Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Tue, 26 Jan 2021 20:03:45 +0200 Subject: Added cog unloader to cancel notifier --- bot/exts/moderation/defcon.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 1e88a8d9c..a180d7aae 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -59,7 +59,10 @@ class Defcon(Cog): async def sync_settings(self) -> None: """On cog load, try to synchronize DEFCON settings to the API.""" + log.trace("Waiting for the guild to become available before syncing.") await self.bot.wait_until_guild_available() + + log.trace("Syncing settings.") self.channel = await self.bot.fetch_channel(Channels.defcon) try: @@ -268,6 +271,11 @@ class Defcon(Cog): """Routinely notify moderators that DEFCON is active.""" await 
self.channel.send(f"Defcon is on and is set to {self.days.days} day{ngettext('', 's', self.days.days)}.") + def cog_unload(self) -> None: + """Cancel the notifer task when the cog unloads.""" + log.trace("Cog unload: canceling defcon notifier task.") + self.defcon_notifier.cancel() + def setup(bot: Bot) -> None: """Load the Defcon cog.""" -- cgit v1.2.3 From aeaebbd9a49afa5e53070afc5498ad5a25cad6fe Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Tue, 26 Jan 2021 21:51:02 +0200 Subject: Defon doesn't reset the number of days --- bot/exts/moderation/defcon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index a180d7aae..e0baab099 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -182,7 +182,7 @@ class Defcon(Cog): Currently, this just adds an account age requirement. Use !defcon days to set how old an account must be, in days. """ - await self._defcon_action(ctx, days=0, action=Action.ENABLED) + await self._defcon_action(ctx, days=self.days, action=Action.ENABLED) @defcon_group.command(name='disable', aliases=('off', 'd'), root_aliases=("defoff",)) @has_any_role(*MODERATION_ROLES) -- cgit v1.2.3 From 82f1c37cff8213963d7950240bc770bec63472dc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 30 Jan 2021 10:06:02 +0100 Subject: Require a set of names to ignore instead of a blanket ignoring As work is done on the modules the wrapper and wrapped functions are in more conflicts can occur, previously this could be missed as the info log that was done in case the error was suppressed was done when modules were being initialized during which there is a logging spam. 
--- bot/utils/function.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/bot/utils/function.py b/bot/utils/function.py index 4fa7a9f60..9bc44e753 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -88,7 +88,7 @@ def update_wrapper_globals( wrapper: types.FunctionType, wrapped: types.FunctionType, *, - error_on_conflict: bool = True, + ignored_conflict_names: t.Set[str] = frozenset(), ) -> types.FunctionType: """ Update globals of `wrapper` with the globals from `wrapped`. @@ -100,25 +100,22 @@ def update_wrapper_globals( This function creates a new function functionally identical to `wrapper`, which has the globals replaced with a merge of `wrapped`s globals and the `wrapper`s globals. - If `error_on_conflict` is True, an exception will be raised in case `wrapper` and `wrapped` share a global name - that is used by `wrapped`'s typehints, as this can cause incorrect objects being used by discordpy's converters. - The error can be turned into a warning by setting the argument to False. + An exception will be raised in case `wrapper` and `wrapped` share a global name that is used by + `wrapped`'s typehints and is not in `ignored_conflict_names`, + as this can cause incorrect objects being used by discordpy's converters. """ - forwardrefs = (ann for ann in wrapped.__annotations__.values() if isinstance(ann, str)) - annotation_global_names = (ann.split(".", maxsplit=1)[0] for ann in forwardrefs) + annotation_global_names = ( + ann.split(".", maxsplit=1)[0] for ann in wrapped.__annotations__.values() if isinstance(ann, str) + ) # Conflicting globals from both functions' modules that are also used in the wrapper and in wrapped's annotations. 
shared_globals = set(wrapper.__code__.co_names) & set(annotation_global_names) - shared_globals &= set(wrapped.__globals__) & set(wrapper.__globals__) + shared_globals &= set(wrapped.__globals__) & set(wrapper.__globals__) - ignored_conflict_names if shared_globals: - message = ( + raise GlobalNameConflictError( f"wrapper and the wrapped function share the following " - f"global names used by annotations: {', '.join(shared_globals)}. " - f"Resolve the conflicts or pass error_on_conflict=False to suppress this error if this is intentional." + f"global names used by annotations: {', '.join(shared_globals)}. Resolve the conflicts or add " + f"the name to the `ignored_conflict_names` set to suppress this error if this is intentional." ) - if error_on_conflict: - raise GlobalNameConflictError(message) - else: - log.info(message) new_globals = wrapper.__globals__.copy() new_globals.update((k, v) for k, v in wrapped.__globals__.items() if k not in wrapper.__code__.co_names) @@ -136,12 +133,15 @@ def command_wraps( assigned: t.Sequence[str] = functools.WRAPPER_ASSIGNMENTS, updated: t.Sequence[str] = functools.WRAPPER_UPDATES, *, - error_on_conflict: bool = True, + ignored_conflict_names: t.Set[str] = frozenset(), ) -> t.Callable[[types.FunctionType], types.FunctionType]: """Update the decorated function to look like `wrapped` and update globals for discordpy forwardref evaluation.""" def decorator(wrapper: types.FunctionType) -> types.FunctionType: return functools.update_wrapper( - update_wrapper_globals(wrapper, wrapped, error_on_conflict=error_on_conflict), wrapped, assigned, updated + update_wrapper_globals(wrapper, wrapped, ignored_conflict_names=ignored_conflict_names), + wrapped, + assigned, + updated, ) return decorator -- cgit v1.2.3 From 4dee6d3c4e18144b35011fc4441738a82fcb522b Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Sat, 30 Jan 2021 11:43:14 +0200 Subject: Got rid of unnecessary regex matching things Stuff 
like `/{0,1}` and `?` at the ends of groups --- bot/exts/info/code_snippets.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index e825ec513..4c8de05fc 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -14,7 +14,7 @@ log = logging.getLogger(__name__) GITHUB_RE = re.compile( r'https://github\.com/(?P[a-zA-Z0-9-]+/[\w.-]+)/blob/' - r'(?P[^#>]+/{0,1})(\?[^#>]+)?(#L(?P\d+)([-~:]L(?P\d+))?)' + r'(?P[^#>]+)(\?[^#>]+)?(#L(?P\d+)([-~:]L(?P\d+))?)' ) GITHUB_GIST_RE = re.compile( @@ -26,13 +26,13 @@ GITHUB_GIST_RE = re.compile( GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} GITLAB_RE = re.compile( - r'https://gitlab\.com/(?P[\w.-]+/[\w.-]+)/\-/blob/(?P[^#>]+/{0,1})' + r'https://gitlab\.com/(?P[\w.-]+/[\w.-]+)/\-/blob/(?P[^#>]+)' r'(\?[^#>]+)?(#L(?P\d+)(-(?P\d+))?)' ) BITBUCKET_RE = re.compile( - r'https://bitbucket\.org/(?P[a-zA-Z0-9-]+/[\w.-]+?)/src/(?P[0-9a-zA-Z]+?)' - r'/(?P[^#>]+?)(\?[^#>]+)?(#lines-(?P\d+)(:(?P\d+))?)' + r'https://bitbucket\.org/(?P[a-zA-Z0-9-]+/[\w.-]+)/src/(?P[0-9a-zA-Z]+)' + r'/(?P[^#>]+)(\?[^#>]+)?(#lines-(?P\d+)(:(?P\d+))?)' ) -- cgit v1.2.3 From 25702f7d44eefbdb3d727b39bc0752e042320d8d Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Sat, 30 Jan 2021 22:53:52 +0200 Subject: Use the GitLab API for GitLab snippets --- bot/exts/info/code_snippets.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 4c8de05fc..e149b5637 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -127,8 +127,11 @@ class CodeSnippets(Cog): enc_repo = quote_plus(repo) # Searches the GitLab API for the specified branch - branches = await self._fetch_response(f'https://api.github.com/repos/{repo}/branches', 'json') - tags = await self._fetch_response(f'https://api.github.com/repos/{repo}/tags', 'json') + branches = await 
self._fetch_response( + f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/branches', + 'json' + ) + tags = await self._fetch_response(f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/tags', 'json') refs = branches + tags ref, file_path = self._find_ref(path, refs) enc_ref = quote_plus(ref) -- cgit v1.2.3 From 7b27971c7d2cda0ebea091af76314f11bd6d0ba7 Mon Sep 17 00:00:00 2001 From: Andi Qu Date: Sat, 30 Jan 2021 22:56:25 +0200 Subject: Fixed syntax error with wait_for_deletion --- bot/exts/info/code_snippets.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index e149b5637..f0cd54c0c 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -234,8 +234,7 @@ class CodeSnippets(Cog): await message.edit(suppress=True) await wait_for_deletion( await message.channel.send(message_to_send), - (message.author.id,), - client=self.bot + (message.author.id,) ) -- cgit v1.2.3 From 085f8ac801316c6d8ad91a3b63b3e755c3a6aea3 Mon Sep 17 00:00:00 2001 From: Chris Date: Sun, 31 Jan 2021 15:32:30 +0000 Subject: return true when tag is on cd --- bot/exts/info/tags.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py index 00b4d1a78..e0556ee30 100644 --- a/bot/exts/info/tags.py +++ b/bot/exts/info/tags.py @@ -189,7 +189,7 @@ class Tags(Cog): If a tag is not specified, display a paginated embed of all tags. Tags are on cooldowns on a per-tag, per-channel basis. If a tag is on cooldown, display - nothing and return False. + nothing and return True. """ def _command_on_cooldown(tag_name: str) -> bool: """ @@ -217,7 +217,7 @@ class Tags(Cog): f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. " f"Cooldown ends in {time_left:.1f} seconds." 
) - return False + return True if tag_name is not None: temp_founds = self._get_tag(tag_name) @@ -285,7 +285,8 @@ class Tags(Cog): """ Get a specified tag, or a list of all tags if no tag is specified. - Returns False if a tag is on cooldown, or if no matches are found. + Returns True if something can be send, or the tag is on cooldown + Returns False if no matches are found. """ return await self.display_tag(ctx, tag_name) -- cgit v1.2.3 From cc23ad61e3a9d33ff835c04c2c8d0ddcb45de736 Mon Sep 17 00:00:00 2001 From: Chris Date: Sun, 31 Jan 2021 15:35:56 +0000 Subject: Grammar --- bot/exts/info/tags.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py index e0556ee30..bb91a8563 100644 --- a/bot/exts/info/tags.py +++ b/bot/exts/info/tags.py @@ -285,7 +285,7 @@ class Tags(Cog): """ Get a specified tag, or a list of all tags if no tag is specified. - Returns True if something can be send, or the tag is on cooldown + Returns True if something can be sent, or if the tag is on cooldown. Returns False if no matches are found. """ return await self.display_tag(ctx, tag_name) -- cgit v1.2.3 From 01ad92b2d3c4a3679f86ac8889736fa873e00ae4 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Sun, 31 Jan 2021 17:35:30 +0100 Subject: Re created file from last point --- bot/resources/tags/empty-json.md | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 bot/resources/tags/empty-json.md diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md new file mode 100644 index 000000000..a54d3d71a --- /dev/null +++ b/bot/resources/tags/empty-json.md @@ -0,0 +1,27 @@ +When creating a new JSON file you might run into the following error. + +`JSONDecodeError: Expecting value: line 1 column 1 (char 0)` + +In short this error means your JSON is invalid in it's current state. +A JSON may never be completely empty and must always at least have one of the following items. 
+ +``` +object +array +string +number +"true" +"false" +"null" +``` + +To resolve this issue, you create one of the above values in your JSON. It is very common to use `{}` to make an object. Adding the following to your JSON should resolve this issue. + +```json +{ + + +} +``` + +Make sure to put all your data between the `{}`, just like you would when making a dictionary. \ No newline at end of file -- cgit v1.2.3 From 5a6f77fde58f024ea151adfdc6a5745eeb0046cd Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Sun, 31 Jan 2021 17:41:06 +0100 Subject: name fix, added suggestions from previous PR --- bot/resources/tags/empty-json.md | 27 --------------------------- bot/resources/tags/empty_json.md | 24 ++++++++++++++++++++++++ 2 files changed, 24 insertions(+), 27 deletions(-) delete mode 100644 bot/resources/tags/empty-json.md create mode 100644 bot/resources/tags/empty_json.md diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md deleted file mode 100644 index a54d3d71a..000000000 --- a/bot/resources/tags/empty-json.md +++ /dev/null @@ -1,27 +0,0 @@ -When creating a new JSON file you might run into the following error. - -`JSONDecodeError: Expecting value: line 1 column 1 (char 0)` - -In short this error means your JSON is invalid in it's current state. -A JSON may never be completely empty and must always at least have one of the following items. - -``` -object -array -string -number -"true" -"false" -"null" -``` - -To resolve this issue, you create one of the above values in your JSON. It is very common to use `{}` to make an object. Adding the following to your JSON should resolve this issue. - -```json -{ - - -} -``` - -Make sure to put all your data between the `{}`, just like you would when making a dictionary. 
\ No newline at end of file diff --git a/bot/resources/tags/empty_json.md b/bot/resources/tags/empty_json.md new file mode 100644 index 000000000..36511abb6 --- /dev/null +++ b/bot/resources/tags/empty_json.md @@ -0,0 +1,24 @@ +When creating a new JSON file you might run into the following error. + +`JSONDecodeError: Expecting value: line 1 column 1 (char 0)` + +In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and empty. +A JSON may never be completely empty. It is recommended to have at least one of the following in your json: + +``` +object +array +``` + +To resolve this issue, you create one of the above values in your JSON. It is very common to use `{}` to make an object, which is similar to a dictionary in python. +When this is added to your JSON, it will look like this: + +```json +{ + + +} +``` + +The error is resolved now. +Make sure to put all your data between the `{}`, just like you would when making a dictionary. \ No newline at end of file -- cgit v1.2.3 From 4704344807bf56a544ddeb8cdc592bcb69675cf6 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Sun, 31 Jan 2021 17:48:09 +0100 Subject: Fixed EOF --- bot/resources/tags/empty_json.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/empty_json.md b/bot/resources/tags/empty_json.md index 36511abb6..8fb7c4e23 100644 --- a/bot/resources/tags/empty_json.md +++ b/bot/resources/tags/empty_json.md @@ -21,4 +21,4 @@ When this is added to your JSON, it will look like this: ``` The error is resolved now. -Make sure to put all your data between the `{}`, just like you would when making a dictionary. \ No newline at end of file +Make sure to put all your data between the `{}`, just like you would when making a dictionary. 
-- cgit v1.2.3 From 9931ebc20ff3dcda11cd2bca338c3a798e6f6b17 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Mon, 1 Feb 2021 18:29:48 +0100 Subject: Removed extra whitespace line in last example --- bot/resources/tags/empty_json.md | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/resources/tags/empty_json.md b/bot/resources/tags/empty_json.md index 8fb7c4e23..d5e0f843f 100644 --- a/bot/resources/tags/empty_json.md +++ b/bot/resources/tags/empty_json.md @@ -16,7 +16,6 @@ When this is added to your JSON, it will look like this: ```json { - } ``` -- cgit v1.2.3 From dc8a5d4084e124f9a7d6e0d31658b6eb0637bccc Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Mon, 1 Feb 2021 18:41:09 +0100 Subject: Changed some wording --- bot/resources/tags/empty_json.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/resources/tags/empty_json.md b/bot/resources/tags/empty_json.md index d5e0f843f..45df3fd54 100644 --- a/bot/resources/tags/empty_json.md +++ b/bot/resources/tags/empty_json.md @@ -2,16 +2,16 @@ When creating a new JSON file you might run into the following error. `JSONDecodeError: Expecting value: line 1 column 1 (char 0)` -In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and empty. -A JSON may never be completely empty. It is recommended to have at least one of the following in your json: +In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and completely empty. +Whilst the JSON data may be empty, the .json file must not. It is recommended to have at least one of the following data types in your .json file: ``` object array ``` -To resolve this issue, you create one of the above values in your JSON. It is very common to use `{}` to make an object, which is similar to a dictionary in python. 
-When this is added to your JSON, it will look like this: +To resolve this issue, you create one of the above data types in your .json file. It is very common to use `{}` to make an object, which works similar to a dictionary in python. +When this is added to your .json file, it will look like this: ```json { -- cgit v1.2.3 From 5040129e8b32ace05d8b391c1753a96769555bab Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Mon, 1 Feb 2021 18:42:46 +0100 Subject: added some more clarification --- bot/resources/tags/empty_json.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/empty_json.md b/bot/resources/tags/empty_json.md index 45df3fd54..eaeafeb18 100644 --- a/bot/resources/tags/empty_json.md +++ b/bot/resources/tags/empty_json.md @@ -3,7 +3,7 @@ When creating a new JSON file you might run into the following error. `JSONDecodeError: Expecting value: line 1 column 1 (char 0)` In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and completely empty. -Whilst the JSON data may be empty, the .json file must not. It is recommended to have at least one of the following data types in your .json file: +Whilst the JSON data, the data you wish to store, may be empty, the .json file must not. It is recommended to have at least one of the following data types in your .json file: ``` object -- cgit v1.2.3 From 7b4833ed11f96d1e8bc26ec3997ee42956dca230 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Mon, 1 Feb 2021 18:45:15 +0100 Subject: suggestion for: more friendly/less personal suggestion. --- bot/resources/tags/empty_json.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/resources/tags/empty_json.md b/bot/resources/tags/empty_json.md index eaeafeb18..9e5c5fd4f 100644 --- a/bot/resources/tags/empty_json.md +++ b/bot/resources/tags/empty_json.md @@ -3,14 +3,14 @@ When creating a new JSON file you might run into the following error. 
`JSONDecodeError: Expecting value: line 1 column 1 (char 0)` In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and completely empty. -Whilst the JSON data, the data you wish to store, may be empty, the .json file must not. It is recommended to have at least one of the following data types in your .json file: +Whilst the JSON data, the data you wish to store, may be empty, the .json file must not. You most likely want to use one of the following data types in your .json file: ``` object array ``` -To resolve this issue, you create one of the above data types in your .json file. It is very common to use `{}` to make an object, which works similar to a dictionary in python. +To resolve this issue, create one of the above data types in your .json file. It is very common to use `{}` to make an object, which works similar to a dictionary in python. When this is added to your .json file, it will look like this: ```json -- cgit v1.2.3 From aace6002d8587b5a79c1ba456ac045a7351152dd Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 2 Feb 2021 21:53:44 +0100 Subject: Attempt to resolve first part of the argument in case of a failed lookup --- bot/exts/info/doc/_cog.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index df076f162..16baa6320 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -244,8 +244,18 @@ class DocCog(commands.Cog): symbol_info = self.doc_symbols.get(symbol) if symbol_info is None: - log.debug("Symbol does not exist.") - return None + if symbol.count(" "): + # If an invalid symbol contains a space, check if the command was invoked + # in the format !d + symbol = symbol.split(" ", maxsplit=1)[0] + symbol_info = self.doc_symbols.get(symbol) + if symbol_info is None: + log.debug("Symbol does not exist.") + return None + else: + 
log.debug("Symbol does not exist.") + return None + self.bot.stats.incr(f"doc_fetches.{symbol_info.package}") with self.symbol_get_event: -- cgit v1.2.3 From fd67924e9fb606626800bca81c2dd159cbf5c93b Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Feb 2021 10:19:52 +0100 Subject: Import module directly to avoid circular dependency and use of type --- bot/exts/info/doc/_batch_parser.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 872f08ea9..072545e66 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -7,7 +7,7 @@ from collections import defaultdict from contextlib import suppress from functools import partial from operator import attrgetter -from typing import Dict, List, NamedTuple, TYPE_CHECKING, Union +from typing import Dict, List, NamedTuple, Union import discord from bs4 import BeautifulSoup @@ -15,10 +15,8 @@ from bs4 import BeautifulSoup import bot from bot.constants import Channels from bot.utils.lock import lock_arg -from . import NAMESPACE, doc_cache +from . 
import NAMESPACE, _cog, doc_cache from ._parsing import get_symbol_markdown -if TYPE_CHECKING: - from ._cog import DocItem log = logging.getLogger(__name__) @@ -35,7 +33,7 @@ class StaleInventoryNotifier: await bot.instance.wait_until_guild_available() self._dev_log = bot.instance.get_channel(Channels.dev_log) - async def send_warning(self, item: DocItem) -> None: + async def send_warning(self, item: _cog.DocItem) -> None: """Send a warning to dev log is one wasn't already sent for `item`'s url.""" if item.url not in self._warned_urls: self._warned_urls.add(item.url) @@ -50,11 +48,11 @@ class StaleInventoryNotifier: class QueueItem(NamedTuple): """Contains a symbol and the BeautifulSoup object needed to parse it.""" - symbol: DocItem + symbol: _cog.DocItem soup: BeautifulSoup - def __eq__(self, other: Union[QueueItem, DocItem]): - if isinstance(other, type(self.symbol)): + def __eq__(self, other: Union[QueueItem, _cog.DocItem]): + if isinstance(other, _cog.DocItem): return self.symbol == other return NamedTuple.__eq__(self, other) @@ -92,8 +90,8 @@ class BatchParser: def __init__(self): self._queue: List[QueueItem] = [] - self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) - self._item_futures: Dict[DocItem, ParseResultFuture] = {} + self._page_symbols: Dict[str, List[_cog.DocItem]] = defaultdict(list) + self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = {} self._parse_task = None self.cleanup_futures_task = bot.instance.loop.create_task(self._cleanup_futures()) @@ -101,7 +99,7 @@ class BatchParser: self.stale_inventory_notifier = StaleInventoryNotifier() @lock_arg(NAMESPACE, "doc_item", attrgetter("url"), wait=True) - async def get_markdown(self, doc_item: DocItem) -> str: + async def get_markdown(self, doc_item: _cog.DocItem) -> str: """ Get the result Markdown of `doc_item`. 
@@ -163,7 +161,7 @@ class BatchParser: self._parse_task = None log.trace("Finished parsing queue.") - def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: + def _move_to_front(self, item: Union[QueueItem, _cog.DocItem]) -> None: """Move `item` to the front of the parse queue.""" # The parse queue stores soups along with the doc symbols in QueueItem objects, # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. @@ -173,7 +171,7 @@ class BatchParser: self._queue.append(queue_item) log.trace(f"Moved {item} to the front of the queue.") - def add_item(self, doc_item: DocItem) -> None: + def add_item(self, doc_item: _cog.DocItem) -> None: """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" self._page_symbols[doc_item.url].append(doc_item) -- cgit v1.2.3 From 1df9459c2c9c33f3e6dbcbd1b76415610aade192 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Feb 2021 11:49:53 +0100 Subject: Use more consistent naming for doc items and symbols Trailing commas were also added where missing --- bot/exts/info/doc/_batch_parser.py | 32 +++++------ bot/exts/info/doc/_cog.py | 108 ++++++++++++++++++------------------- bot/exts/info/doc/_parsing.py | 2 +- 3 files changed, 71 insertions(+), 71 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 072545e66..2edf05ff0 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -33,27 +33,27 @@ class StaleInventoryNotifier: await bot.instance.wait_until_guild_available() self._dev_log = bot.instance.get_channel(Channels.dev_log) - async def send_warning(self, item: _cog.DocItem) -> None: + async def send_warning(self, doc_item: _cog.DocItem) -> None: """Send a warning to dev log is one wasn't already sent for `item`'s url.""" - if item.url not in self._warned_urls: - self._warned_urls.add(item.url) + if doc_item.url 
not in self._warned_urls: + self._warned_urls.add(doc_item.url) await self._init_task embed = discord.Embed( - description=f"Doc item `{item.symbol_id=}` present in loaded documentation inventories " - f"not found on [site]({item.url}), inventories may need to be refreshed." + description=f"Doc item `{doc_item.symbol_id=}` present in loaded documentation inventories " + f"not found on [site]({doc_item.url}), inventories may need to be refreshed." ) await self._dev_log.send(embed=embed) class QueueItem(NamedTuple): - """Contains a symbol and the BeautifulSoup object needed to parse it.""" + """Contains a doc_item and the BeautifulSoup object needed to parse it.""" - symbol: _cog.DocItem + doc_item: _cog.DocItem soup: BeautifulSoup def __eq__(self, other: Union[QueueItem, _cog.DocItem]): if isinstance(other, _cog.DocItem): - return self.symbol == other + return self.doc_item == other return NamedTuple.__eq__(self, other) @@ -83,14 +83,14 @@ class BatchParser: """ Get the Markdown of all symbols on a page and send them to redis when a symbol is requested. - DocItems are added through the `add_item` method which adds them to the `_page_symbols` dict. + DocItems are added through the `add_item` method which adds them to the `_page_doc_items` dict. `get_markdown` is used to fetch the Markdown; when this is used for the first time on a page, all of the symbols are queued to be parsed to avoid multiple web requests to the same page. """ def __init__(self): self._queue: List[QueueItem] = [] - self._page_symbols: Dict[str, List[_cog.DocItem]] = defaultdict(list) + self._page_doc_items: Dict[str, List[_cog.DocItem]] = defaultdict(list) self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = {} self._parse_task = None @@ -109,14 +109,14 @@ class BatchParser: Not safe to run while `self.clear` is running. 
""" if doc_item not in self._item_futures: - self._item_futures.update((symbol, ParseResultFuture()) for symbol in self._page_symbols[doc_item.url]) + self._item_futures.update((item, ParseResultFuture()) for item in self._page_doc_items[doc_item.url]) self._item_futures[doc_item].user_requested = True async with bot.instance.http_session.get(doc_item.url) as response: soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") - self._queue.extend(QueueItem(symbol, soup) for symbol in self._page_symbols[doc_item.url]) - log.debug(f"Added symbols from {doc_item.url} to parse queue.") + self._queue.extend(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) + log.debug(f"Added items from {doc_item.url} to parse queue.") if self._parse_task is None: self._parse_task = asyncio.create_task(self._parse_queue()) @@ -139,7 +139,7 @@ class BatchParser: item, soup = self._queue.pop() try: if (future := self._item_futures[item]).done(): - # Some items are present in the inventories multiple times under different symbols, + # Some items are present in the inventories multiple times under different symbol names, # if we already parsed an equal item, we can just skip it. 
continue @@ -173,7 +173,7 @@ class BatchParser: def add_item(self, doc_item: _cog.DocItem) -> None: """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" - self._page_symbols[doc_item.url].append(doc_item) + self._page_doc_items[doc_item.url].append(doc_item) async def clear(self) -> None: """ @@ -186,7 +186,7 @@ class BatchParser: if self._parse_task is not None: self._parse_task.cancel() self._queue.clear() - self._page_symbols.clear() + self._page_doc_items.clear() self._item_futures.clear() async def _cleanup_futures(self) -> None: diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 16baa6320..0ff775ac7 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -92,31 +92,31 @@ class DocCog(commands.Cog): self.base_urls[api_package_name] = base_url for group, items in package.items(): - for symbol, relative_doc_url in items: + for symbol_name, relative_doc_url in items: # e.g. get 'class' from 'py:class' group_name = group.split(":")[1] - if (original_symbol := self.doc_symbols.get(symbol)) is not None: + if (original_item := self.doc_symbols.get(symbol_name)) is not None: replaced_symbol_name = self.ensure_unique_symbol_name( api_package_name, group_name, - original_symbol, - symbol, + original_item, + symbol_name, ) if replaced_symbol_name is not None: - symbol = replaced_symbol_name + symbol_name = replaced_symbol_name relative_url_path, _, symbol_id = relative_doc_url.partition("#") # Intern fields that have shared content so we're not storing unique strings for every object - symbol_item = DocItem( + doc_item = DocItem( api_package_name, sys.intern(group_name), base_url, sys.intern(relative_url_path), - symbol_id + symbol_id, ) - self.doc_symbols[symbol] = symbol_item - self.item_fetcher.add_item(symbol_item) + self.doc_symbols[symbol_name] = doc_item + self.item_fetcher.add_item(doc_item) log.trace(f"Fetched inventory for {api_package_name}.") @@ -124,7 +124,7 @@ class 
DocCog(commands.Cog): self, api_package_name: str, base_url: str, - inventory_url: str + inventory_url: str, ) -> None: """ Update the cog's inventory, or reschedule this method to execute again if the remote inventory unreachable. @@ -144,7 +144,7 @@ class DocCog(commands.Cog): self.inventory_scheduler.schedule_later( delay*60, api_package_name, - self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url) + self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url), ) else: self.update_single(api_package_name, base_url, package) @@ -154,7 +154,7 @@ class DocCog(commands.Cog): package_name: str, group_name: str, original_item: DocItem, - symbol_name: str + symbol_name: str, ) -> Optional[str]: """ Ensure `symbol_name` doesn't overwrite an another symbol in `doc_symbols`. @@ -166,42 +166,42 @@ class DocCog(commands.Cog): """ # Certain groups are added as prefixes to disambiguate the symbols. if group_name in FORCE_PREFIX_GROUPS: - new_symbol = f"{group_name}.{symbol_name}" - if new_symbol in self.doc_symbols: + new_symbol_name = f"{group_name}.{symbol_name}" + if new_symbol_name in self.doc_symbols: # If there's still a conflict, prefix with package name. - new_symbol = f"{package_name}.{new_symbol}" - self.renamed_symbols[symbol_name].append(new_symbol) - return new_symbol + new_symbol_name = f"{package_name}.{new_symbol_name}" + self.renamed_symbols[symbol_name].append(new_symbol_name) + return new_symbol_name # The existing symbol with which the current symbol conflicts should have a group prefix. # It currently doesn't have the group prefix because it's only added once there's a conflict. 
elif (original_symbol_group := original_item.group) in FORCE_PREFIX_GROUPS: - overridden_symbol = f"{original_symbol_group}.{symbol_name}" - if overridden_symbol in self.doc_symbols: + overridden_symbol_name = f"{original_symbol_group}.{symbol_name}" + if overridden_symbol_name in self.doc_symbols: # If there's still a conflict, prefix with package name. - overridden_symbol = f"{original_item.package}.{overridden_symbol}" + overridden_symbol_name = f"{original_item.package}.{overridden_symbol_name}" - self.doc_symbols[overridden_symbol] = original_item - self.renamed_symbols[symbol_name].append(overridden_symbol) + self.doc_symbols[overridden_symbol_name] = original_item + self.renamed_symbols[symbol_name].append(overridden_symbol_name) elif package_name in PRIORITY_PACKAGES: - overridden_symbol = f"{original_item.package}.{symbol_name}" - if overridden_symbol in self.doc_symbols: + overridden_symbol_name = f"{original_item.package}.{symbol_name}" + if overridden_symbol_name in self.doc_symbols: # If there's still a conflict, add the symbol's group in the middle. - overridden_symbol = f"{original_item.package}.{original_item.group}.{symbol_name}" + overridden_symbol_name = f"{original_item.package}.{original_item.group}.{symbol_name}" - self.doc_symbols[overridden_symbol] = original_item - self.renamed_symbols[symbol_name].append(overridden_symbol) + self.doc_symbols[overridden_symbol_name] = original_item + self.renamed_symbols[symbol_name].append(overridden_symbol_name) # If we can't specially handle the symbol through its group or package, # fall back to prepending its package name to the front. else: - new_symbol = f"{package_name}.{symbol_name}" - if new_symbol in self.doc_symbols: + new_symbol_name = f"{package_name}.{symbol_name}" + if new_symbol_name in self.doc_symbols: # If there's still a conflict, add the symbol's group in the middle. 
- new_symbol = f"{package_name}.{group_name}.{symbol_name}" - self.renamed_symbols[symbol_name].append(new_symbol) - return new_symbol + new_symbol_name = f"{package_name}.{group_name}.{symbol_name}" + self.renamed_symbols[symbol_name].append(new_symbol_name) + return new_symbol_name async def refresh_inventory(self) -> None: """Refresh internal documentation inventory.""" @@ -229,7 +229,7 @@ class DocCog(commands.Cog): log.debug("Finished inventory refresh.") self.refresh_event.set() - async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: + async def get_symbol_embed(self, symbol_name: str) -> Optional[discord.Embed]: """ Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. @@ -237,47 +237,47 @@ class DocCog(commands.Cog): First check the DocRedisCache before querying the cog's `BatchParser`. """ - log.trace(f"Building embed for symbol `{symbol}`") + log.trace(f"Building embed for symbol `{symbol_name}`") if not self.refresh_event.is_set(): log.debug("Waiting for inventories to be refreshed before processing item.") await self.refresh_event.wait() - symbol_info = self.doc_symbols.get(symbol) - if symbol_info is None: - if symbol.count(" "): + doc_item = self.doc_symbols.get(symbol_name) + if doc_item is None: + if symbol_name.count(" "): # If an invalid symbol contains a space, check if the command was invoked # in the format !d - symbol = symbol.split(" ", maxsplit=1)[0] - symbol_info = self.doc_symbols.get(symbol) - if symbol_info is None: + symbol_name = symbol_name.split(" ", maxsplit=1)[0] + doc_item = self.doc_symbols.get(symbol_name) + if doc_item is None: log.debug("Symbol does not exist.") return None else: log.debug("Symbol does not exist.") return None - self.bot.stats.incr(f"doc_fetches.{symbol_info.package}") + self.bot.stats.incr(f"doc_fetches.{doc_item.package}") with self.symbol_get_event: - markdown = await doc_cache.get(symbol_info) + markdown = await doc_cache.get(doc_item) if 
markdown is None: - log.debug(f"Redis cache miss for symbol `{symbol}`.") - markdown = await self.item_fetcher.get_markdown(symbol_info) + log.debug(f"Redis cache miss for symbol `{symbol_name}`.") + markdown = await self.item_fetcher.get_markdown(doc_item) if markdown is not None: - await doc_cache.set(symbol_info, markdown) + await doc_cache.set(doc_item, markdown) else: markdown = "Unable to parse the requested symbol." embed = discord.Embed( - title=discord.utils.escape_markdown(symbol), - url=f"{symbol_info.url}#{symbol_info.symbol_id}", + title=discord.utils.escape_markdown(symbol_name), + url=f"{doc_item.url}#{doc_item.symbol_id}", description=markdown ) # Show all symbols with the same name that were renamed in the footer, # with a max of 100 chars. - if symbol in self.renamed_symbols: - renamed_symbols = ', '.join(self.renamed_symbols[symbol]) + if symbol_name in self.renamed_symbols: + renamed_symbols = ', '.join(self.renamed_symbols[symbol_name]) footer_text = textwrap.shorten("Moved: " + renamed_symbols, 100, placeholder=' ...') else: footer_text = "" @@ -285,12 +285,12 @@ class DocCog(commands.Cog): return embed @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) - async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: + async def docs_group(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None: """Look up documentation for Python symbols.""" - await self.get_command(ctx, symbol=symbol) + await self.get_command(ctx, symbol_name=symbol_name) @docs_group.command(name='getdoc', aliases=('g',)) - async def get_command(self, ctx: commands.Context, *, symbol: Optional[str]) -> None: + async def get_command(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None: """ Return a documentation embed for a given symbol. 
@@ -302,7 +302,7 @@ class DocCog(commands.Cog): !docs aiohttp.ClientSession !docs getdoc aiohttp.ClientSession """ - if not symbol: + if not symbol_name: inventory_embed = discord.Embed( title=f"All inventories (`{len(self.base_urls)}` total)", colour=discord.Colour.blue() @@ -317,7 +317,7 @@ class DocCog(commands.Cog): await ctx.send(embed=inventory_embed) else: - symbol = symbol.strip("`") + symbol = symbol_name.strip("`") # Fetching documentation for a symbol (at least for the first time, since # caching is used) takes quite some time, so let's send typing to indicate # that we got the command, but are still working on it. diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 0251b0105..8e1b4d7a1 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -232,7 +232,7 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]: """ - Return parsed markdown of the passed symbol using the passed in soup, truncated to fit within a discord message. + Return parsed markdown of the passed item using the passed in soup, truncated to fit within a discord message. The method of parsing and what information gets included depends on the symbol's group. 
""" -- cgit v1.2.3 From a09886d6356be9ea5a98a7deea0cebf31e510095 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Feb 2021 12:05:27 +0100 Subject: Remove url lock The items are added to the futures dict before a context switch can occur, making the subsequent requests to the url skip the queue extend and suspend at the future await --- bot/exts/info/doc/_batch_parser.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 2edf05ff0..c16cf6d28 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -14,8 +14,7 @@ from bs4 import BeautifulSoup import bot from bot.constants import Channels -from bot.utils.lock import lock_arg -from . import NAMESPACE, _cog, doc_cache +from . import _cog, doc_cache from ._parsing import get_symbol_markdown log = logging.getLogger(__name__) @@ -98,7 +97,6 @@ class BatchParser: self.stale_inventory_notifier = StaleInventoryNotifier() - @lock_arg(NAMESPACE, "doc_item", attrgetter("url"), wait=True) async def get_markdown(self, doc_item: _cog.DocItem) -> str: """ Get the result Markdown of `doc_item`. 
-- cgit v1.2.3 From ef5a98595ec647198f3d06375d2c1d4a5a54bf02 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Feb 2021 12:07:13 +0100 Subject: Move BeautifulSoup parsing into an executor --- bot/exts/info/doc/_batch_parser.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index c16cf6d28..13e197587 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -111,7 +111,10 @@ class BatchParser: self._item_futures[doc_item].user_requested = True async with bot.instance.http_session.get(doc_item.url) as response: - soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") + soup = await bot.instance.loop.run_in_executor( + None, + partial(BeautifulSoup, await response.text(encoding="utf8"), "lxml") + ) self._queue.extend(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) log.debug(f"Added items from {doc_item.url} to parse queue.") -- cgit v1.2.3 From c9039b1d012172e7ef3f0ea030420a58db1cbd2d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Feb 2021 12:08:33 +0100 Subject: Create a task for setting the redis result instead of awaiting The queue parsing doesn't depend on anything with redis, so the await only delays the result being set on the future. 
--- bot/exts/info/doc/_batch_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 13e197587..2407a603a 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -149,7 +149,7 @@ class BatchParser: partial(get_symbol_markdown, soup, item), ) if markdown is not None: - await doc_cache.set(item, markdown) + asyncio.create_task(doc_cache.set(item, markdown)) else: asyncio.create_task(self.stale_inventory_notifier.send_warning(item)) except Exception as e: -- cgit v1.2.3 From b1c8e62a7e8ae600a672c4ad3e33b607c8570890 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Feb 2021 12:25:51 +0100 Subject: Use create_task util --- bot/exts/info/doc/_batch_parser.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 2407a603a..d18a455d8 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -14,6 +14,7 @@ from bs4 import BeautifulSoup import bot from bot.constants import Channels +from bot.utils import scheduling from . 
import _cog, doc_cache from ._parsing import get_symbol_markdown @@ -149,9 +150,9 @@ class BatchParser: partial(get_symbol_markdown, soup, item), ) if markdown is not None: - asyncio.create_task(doc_cache.set(item, markdown)) + scheduling.create_task(doc_cache.set(item, markdown)) else: - asyncio.create_task(self.stale_inventory_notifier.send_warning(item)) + scheduling.create_task(self.stale_inventory_notifier.send_warning(item)) except Exception as e: log.exception(f"Unexpected error when handling {item}") future.set_exception(e) -- cgit v1.2.3 From 9d755707178f2c53bea209c42ab4e3154b0a6a60 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Feb 2021 12:42:56 +0100 Subject: Avoid from import on _batch_parser The tests import the modules the other way around causing a circular import --- bot/exts/info/doc/_cog.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 0ff775ac7..e2204bd4a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -19,8 +19,7 @@ from bot.pagination import LinePaginator from bot.utils.lock import SharedEvent, lock from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler -from . import NAMESPACE, PRIORITY_PACKAGES, doc_cache -from ._batch_parser import BatchParser +from . 
import NAMESPACE, PRIORITY_PACKAGES, _batch_parser, doc_cache from ._inventory_parser import InventoryDict, fetch_inventory log = logging.getLogger(__name__) @@ -62,7 +61,7 @@ class DocCog(commands.Cog): self.base_urls = {} self.bot = bot self.doc_symbols: Dict[str, DocItem] = {} - self.item_fetcher = BatchParser() + self.item_fetcher = _batch_parser.BatchParser() self.renamed_symbols = defaultdict(list) self.inventory_scheduler = Scheduler(self.__class__.__name__) -- cgit v1.2.3 From 10ede89b2943d2118fed6448e1eb4383c78b8998 Mon Sep 17 00:00:00 2001 From: Harbys <44087388+Harbys@users.noreply.github.com> Date: Wed, 3 Feb 2021 14:06:39 +0100 Subject: Add a newline for readability. by @MarkKoz Co-authored-by: Mark --- bot/exts/moderation/stream.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index b590956a3..c90c25976 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -50,6 +50,7 @@ class Stream(commands.Cog): \u2003`h` - hours \u2003`M` - minutes∗ \u2003`s` - seconds + Alternatively, an ISO 8601 timestamp can be provided for the duration. """ # Check if user already has streaming permission -- cgit v1.2.3 From 8c75f488848064cd2de01012614a3e1f1c304c6a Mon Sep 17 00:00:00 2001 From: Harbys <44087388+Harbys@users.noreply.github.com> Date: Wed, 3 Feb 2021 14:07:34 +0100 Subject: remove unused constants by @MarkKoz Co-authored-by: Mark --- bot/exts/moderation/stream.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index c90c25976..c2f4d12b4 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -10,8 +10,6 @@ from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration # Constant error messages -TIME_FORMAT_NOT_VALID = f"{Emojis.cross_mark}Please specify a valid time format ex. 10h or 1day." 
-TIME_LESS_EQ_0 = f"{Emojis.cross_mark}Duration can not be a 0 or lower." USER_ALREADY_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user can already stream." USER_ALREADY_NOT_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user already can't stream." -- cgit v1.2.3 From 446a89fdd9c193198199d98ddcd4da719cea2c13 Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 3 Feb 2021 14:12:16 +0100 Subject: remove constants that were used once --- bot/exts/moderation/stream.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index c2f4d12b4..c9563e085 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -9,10 +9,6 @@ from bot.converters import Expiry from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration -# Constant error messages -USER_ALREADY_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user can already stream." -USER_ALREADY_NOT_ALLOWED_TO_STREAM = f"{Emojis.cross_mark}This user already can't stream." 
- class Stream(commands.Cog): """Grant and revoke streaming permissions from users.""" @@ -54,7 +50,7 @@ class Stream(commands.Cog): # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: - await ctx.send(USER_ALREADY_ALLOWED_TO_STREAM) + await ctx.send(f"{Emojis.cross_mark} This user can already stream.") return # Schedule task to remove streaming permission from Member @@ -80,7 +76,7 @@ class Stream(commands.Cog): await user.remove_roles(discord.Object(Roles.video)) await ctx.send(f"{Emojis.check_mark}Streaming permission taken from {user.display_name}") else: - await ctx.send(USER_ALREADY_NOT_ALLOWED_TO_STREAM) + await ctx.send(f"{Emojis.cross_mark} This user already can't stream.") def setup(bot: Bot) -> None: -- cgit v1.2.3 From cd25e1e47079b6e9108a977fd915052c3428c10e Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 3 Feb 2021 14:17:49 +0100 Subject: remove unused tests --- tests/bot/exts/moderation/test_stream.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tests/bot/exts/moderation/test_stream.py b/tests/bot/exts/moderation/test_stream.py index 15956a9de..2ac274699 100644 --- a/tests/bot/exts/moderation/test_stream.py +++ b/tests/bot/exts/moderation/test_stream.py @@ -2,16 +2,11 @@ import unittest from bot.constants import Roles -from bot.exts.moderation.stream import Stream -from tests.helpers import MockBot, MockMember, MockRole +from tests.helpers import MockMember, MockRole class StreamCommandTest(unittest.IsolatedAsyncioTestCase): - def setUp(self) -> None: - self.bot = MockBot() - self.cog = Stream(self.bot) - def test_checking_if_user_has_streaming_permission(self): """ Test searching for video role in Member.roles -- cgit v1.2.3 From 5f76d9e309c3a65ae5ab9bea3c82db29a0553094 Mon Sep 17 00:00:00 2001 From: Harbys <44087388+Harbys@users.noreply.github.com> Date: Wed, 3 Feb 2021 14:21:54 +0100 Subject: add space for readablility Co-authored-by: 
Mark --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index c2f4d12b4..7e15864bf 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -78,7 +78,7 @@ class Stream(commands.Cog): if user.id in self.scheduler: self.scheduler.cancel(user.id) await user.remove_roles(discord.Object(Roles.video)) - await ctx.send(f"{Emojis.check_mark}Streaming permission taken from {user.display_name}") + await ctx.send(f"{Emojis.check_mark} Streaming permission taken from {user.display_name}.") else: await ctx.send(USER_ALREADY_NOT_ALLOWED_TO_STREAM) -- cgit v1.2.3 From 78ded411d8e57d399a00d9132d4caa94ba59f410 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Fri, 5 Feb 2021 19:31:38 +0100 Subject: replaced underscore with dash --- bot/resources/tags/empty-json.md | 23 +++++++++++++++++++++++ bot/resources/tags/empty_json.md | 23 ----------------------- 2 files changed, 23 insertions(+), 23 deletions(-) create mode 100644 bot/resources/tags/empty-json.md delete mode 100644 bot/resources/tags/empty_json.md diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md new file mode 100644 index 000000000..9e5c5fd4f --- /dev/null +++ b/bot/resources/tags/empty-json.md @@ -0,0 +1,23 @@ +When creating a new JSON file you might run into the following error. + +`JSONDecodeError: Expecting value: line 1 column 1 (char 0)` + +In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and completely empty. +Whilst the JSON data, the data you wish to store, may be empty, the .json file must not. You most likely want to use one of the following data types in your .json file: + +``` +object +array +``` + +To resolve this issue, create one of the above data types in your .json file. 
It is very common to use `{}` to make an object, which works similar to a dictionary in python. +When this is added to your .json file, it will look like this: + +```json +{ + +} +``` + +The error is resolved now. +Make sure to put all your data between the `{}`, just like you would when making a dictionary. diff --git a/bot/resources/tags/empty_json.md b/bot/resources/tags/empty_json.md deleted file mode 100644 index 9e5c5fd4f..000000000 --- a/bot/resources/tags/empty_json.md +++ /dev/null @@ -1,23 +0,0 @@ -When creating a new JSON file you might run into the following error. - -`JSONDecodeError: Expecting value: line 1 column 1 (char 0)` - -In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and completely empty. -Whilst the JSON data, the data you wish to store, may be empty, the .json file must not. You most likely want to use one of the following data types in your .json file: - -``` -object -array -``` - -To resolve this issue, create one of the above data types in your .json file. It is very common to use `{}` to make an object, which works similar to a dictionary in python. -When this is added to your .json file, it will look like this: - -```json -{ - -} -``` - -The error is resolved now. -Make sure to put all your data between the `{}`, just like you would when making a dictionary. -- cgit v1.2.3 From f0afae3c7792d3c6b9899a915a05adb95de3b45d Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Fri, 5 Feb 2021 19:49:06 +0100 Subject: Rewrite to make it more compact and to the point --- bot/resources/tags/empty-json.md | 24 ++++++++---------------- 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index 9e5c5fd4f..0246d346f 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -1,23 +1,15 @@ -When creating a new JSON file you might run into the following error. 
-`JSONDecodeError: Expecting value: line 1 column 1 (char 0)` - -In short, this means that your JSON is invalid in its current state. This could very well happen because the file is just new and completely empty. -Whilst the JSON data, the data you wish to store, may be empty, the .json file must not. You most likely want to use one of the following data types in your .json file: +When using JSON you might run into the following error: +``` +JSONDecodeError: Expecting value: line 1 column 1 (char 0) +``` +This error could have appeared because you just created the JSON file and there is nothing in it at the moment. +Whilst having the data empty is no problem, the file itself may never be completely empty. You most likely want one of the following in your json ``` object array ``` +This issue can be resolved by creating one of these data types. An object is the most common of the 2, and is created by editing your file to read `{}`. -To resolve this issue, create one of the above data types in your .json file. It is very common to use `{}` to make an object, which works similar to a dictionary in python. -When this is added to your .json file, it will look like this: - -```json -{ - -} -``` - -The error is resolved now. -Make sure to put all your data between the `{}`, just like you would when making a dictionary. +Different data types are also supported. 
If you wish to read more on these, please reffer to the following article: https://www.tutorialspoint.com/json/json_data_types.htm -- cgit v1.2.3 From 68d53cbe1955b486903a7c962f8d6602766375ce Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Fri, 5 Feb 2021 19:53:16 +0100 Subject: Removed an excess line --- bot/resources/tags/empty-json.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index 0246d346f..a5de2380f 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -10,6 +10,6 @@ Whilst having the data empty is no problem, the file itself may never be complet object array ``` -This issue can be resolved by creating one of these data types. An object is the most common of the 2, and is created by editing your file to read `{}`. +An object is the most common of the 2, and is created by editing your file to read `{}`. Different data types are also supported. If you wish to read more on these, please reffer to the following article: https://www.tutorialspoint.com/json/json_data_types.htm -- cgit v1.2.3 From 04b99031d9895afda5a00080c894b07958919ae9 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Fri, 5 Feb 2021 19:56:37 +0100 Subject: Fixed random newline at start --- bot/resources/tags/empty-json.md | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index a5de2380f..98bfe5fa7 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -1,4 +1,3 @@ - When using JSON you might run into the following error: ``` JSONDecodeError: Expecting value: line 1 column 1 (char 0) -- cgit v1.2.3 From 970b49aec1cfee6cdffe56b3d675224fecde382f Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Fri, 5 Feb 2021 20:47:31 +0100 Subject: Simplified language --- bot/resources/tags/empty-json.md | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git 
a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index 98bfe5fa7..3851dc142 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -4,11 +4,8 @@ JSONDecodeError: Expecting value: line 1 column 1 (char 0) ``` This error could have appeared because you just created the JSON file and there is nothing in it at the moment. -Whilst having the data empty is no problem, the file itself may never be completely empty. You most likely want one of the following in your json -``` -object -array -``` -An object is the most common of the 2, and is created by editing your file to read `{}`. +Whilst having the data empty is no problem, the file itself may never be completely empty. + +You most likely wanted to structure your JSON as a dictionary. For this change your JSON file to read `{}`. Different data types are also supported. If you wish to read more on these, please reffer to the following article: https://www.tutorialspoint.com/json/json_data_types.htm -- cgit v1.2.3 From 49527d94dd792ee3ac81d6f3ee309fcd4f2c63ad Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Feb 2021 21:51:34 +0100 Subject: Remove unnecessary use of partial run_in_executor can provide args to the func it's passed in, making the use of partial unnecessary. 
This will also make it more convenient to move to asyncio.to_thread when the codebase is switched to python 3.9 --- bot/exts/info/doc/_batch_parser.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index d18a455d8..b3f72bb89 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -5,7 +5,6 @@ import logging import time from collections import defaultdict from contextlib import suppress -from functools import partial from operator import attrgetter from typing import Dict, List, NamedTuple, Union @@ -114,7 +113,9 @@ class BatchParser: async with bot.instance.http_session.get(doc_item.url) as response: soup = await bot.instance.loop.run_in_executor( None, - partial(BeautifulSoup, await response.text(encoding="utf8"), "lxml") + BeautifulSoup, + await response.text(encoding="utf8"), + "lxml", ) self._queue.extend(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) @@ -145,10 +146,7 @@ class BatchParser: # if we already parsed an equal item, we can just skip it. 
continue - markdown = await bot.instance.loop.run_in_executor( - None, - partial(get_symbol_markdown, soup, item), - ) + markdown = await bot.instance.loop.run_in_executor(None, get_symbol_markdown, soup, item) if markdown is not None: scheduling.create_task(doc_cache.set(item, markdown)) else: -- cgit v1.2.3 From 81af0099ffd552aa5cb2a61de30cf7bd16a013eb Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 6 Feb 2021 10:41:47 +0200 Subject: Implement showing filterlist entry comment in alerts --- bot/exts/filters/filtering.py | 63 +++++++++++++++++++++++++++---------------- 1 file changed, 40 insertions(+), 23 deletions(-) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 3527bf8bb..6f1374cf4 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -2,7 +2,7 @@ import asyncio import logging import re from datetime import datetime, timedelta -from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Union +from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union import dateutil import discord.errors @@ -137,6 +137,10 @@ class Filtering(Cog): """Fetch items from the filter_list_cache.""" return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"].keys() + def _get_filterlist_value(self, list_type: str, value: Any, *, allowed: bool) -> dict: + """Fetch one specific value from filter_list_cache.""" + return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"][value] + @staticmethod def _expand_spoilers(text: str) -> str: """Return a string containing all interpretations of a spoilered message.""" @@ -236,7 +240,7 @@ class Filtering(Cog): # We also do not need to worry about filters that take the full message, # since all we have is an arbitrary string. 
if _filter["enabled"] and _filter["content_only"]: - match = await _filter["function"](result) + match, reason = await _filter["function"](result) if match: # If this is a filter (not a watchlist), we set the variable so we know @@ -245,7 +249,7 @@ class Filtering(Cog): filter_triggered = True stats = self._add_stats(filter_name, match, result) - await self._send_log(filter_name, _filter, msg, stats, is_eval=True) + await self._send_log(filter_name, _filter, msg, stats, reason, is_eval=True) break # We don't want multiple filters to trigger @@ -267,9 +271,9 @@ class Filtering(Cog): # Does the filter only need the message content or the full message? if _filter["content_only"]: - match = await _filter["function"](msg.content) + match, reason = await _filter["function"](msg.content) else: - match = await _filter["function"](msg) + match, reason = await _filter["function"](msg) if match: is_private = msg.channel.type is discord.ChannelType.private @@ -316,7 +320,7 @@ class Filtering(Cog): log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}") stats = self._add_stats(filter_name, match, msg.content) - await self._send_log(filter_name, _filter, msg, stats) + await self._send_log(filter_name, _filter, msg, stats, reason) break # We don't want multiple filters to trigger @@ -326,6 +330,7 @@ class Filtering(Cog): _filter: Dict[str, Any], msg: discord.Message, stats: Stats, + reason: Optional[str] = None, *, is_eval: bool = False, ) -> None: @@ -339,6 +344,7 @@ class Filtering(Cog): ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True) eval_msg = "using !eval " if is_eval else "" + footer = f"Entry comment: {reason}" if reason else None message = ( f"The {filter_name} {_filter['type']} was triggered by {format_user(msg.author)} " f"{channel_str} {eval_msg}with [the following message]({msg.jump_url}):\n\n" @@ -357,6 +363,7 @@ class Filtering(Cog): channel_id=Channels.mod_alerts, ping_everyone=ping_everyone, 
additional_embeds=stats.additional_embeds, + footer=footer, ) def _add_stats(self, name: str, match: FilterMatch, content: str) -> Stats: @@ -381,9 +388,11 @@ class Filtering(Cog): if name == "filter_invites" and match is not True: additional_embeds = [] for _, data in match.items(): + reason = f"\n**Entry comment:**\n{data['reason']}" if data.get('reason') else "" embed = discord.Embed(description=( f"**Members:**\n{data['members']}\n" f"**Active:**\n{data['active']}" + f"{reason}" )) embed.set_author(name=data["name"]) embed.set_thumbnail(url=data["icon"]) @@ -411,7 +420,7 @@ class Filtering(Cog): and not msg.author.bot # Author not a bot ) - async def _has_watch_regex_match(self, text: str) -> Union[bool, re.Match]: + async def _has_watch_regex_match(self, text: str) -> Tuple[Union[bool, re.Match], Optional[str]]: """ Return True if `text` matches any regex from `word_watchlist` or `token_watchlist` configs. @@ -429,9 +438,11 @@ class Filtering(Cog): for pattern in watchlist_patterns: match = re.search(pattern, text, flags=re.IGNORECASE) if match: - return match + return match, self._get_filterlist_value('filter_token', pattern, allowed=False)['comment'] + + return False, None - async def _has_urls(self, text: str) -> bool: + async def _has_urls(self, text: str) -> Tuple[bool, Optional[str]]: """Returns True if the text contains one of the blacklisted URLs from the config file.""" if not URL_RE.search(text): return False @@ -441,20 +452,21 @@ class Filtering(Cog): for url in domain_blacklist: if url.lower() in text: - return True + return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"] - return False + return False, None @staticmethod - async def _has_zalgo(text: str) -> bool: + async def _has_zalgo(text: str) -> Tuple[bool, None]: """ Returns True if the text contains zalgo characters. Zalgo range is \u0300 – \u036F and \u0489. + Return None as second value for compability with other filters. 
""" - return bool(ZALGO_RE.search(text)) + return bool(ZALGO_RE.search(text)), None - async def _has_invites(self, text: str) -> Union[dict, bool]: + async def _has_invites(self, text: str) -> Tuple[Union[dict, bool], None]: """ Checks if there's any invites in the text content that aren't in the guild whitelist. @@ -500,6 +512,10 @@ class Filtering(Cog): ) if invite_not_allowed: + reason = None + if guild_id in guild_invite_blacklist: + reason = self._get_filterlist_value("guild_invite", guild_id, allowed=False)["comment"] + guild_icon_hash = guild["icon"] guild_icon = ( "https://cdn.discordapp.com/icons/" @@ -511,13 +527,14 @@ class Filtering(Cog): "id": guild['id'], "icon": guild_icon, "members": response["approximate_member_count"], - "active": response["approximate_presence_count"] + "active": response["approximate_presence_count"], + "reason": reason } - return invite_data if invite_data else False + return invite_data if invite_data else False, None @staticmethod - async def _has_rich_embed(msg: Message) -> Union[bool, List[discord.Embed]]: + async def _has_rich_embed(msg: Message) -> Tuple[Union[bool, List[discord.Embed]], None]: """Determines if `msg` contains any rich embeds not auto-generated from a URL.""" if msg.embeds: for embed in msg.embeds: @@ -526,24 +543,24 @@ class Filtering(Cog): if not embed.url or embed.url not in urls: # If `embed.url` does not exist or if `embed.url` is not part of the content # of the message, it's unlikely to be an auto-generated embed by Discord. - return msg.embeds + return msg.embeds, None else: log.trace( "Found a rich embed sent by a regular user account, " "but it was likely just an automatic URL embed." 
) - return False - return False + return False, None + return False, None @staticmethod - async def _has_everyone_ping(text: str) -> bool: + async def _has_everyone_ping(text: str) -> Tuple[bool, None]: """Determines if `msg` contains an @everyone or @here ping outside of a codeblock.""" # First pass to avoid running re.sub on every message if not EVERYONE_PING_RE.search(text): - return False + return False, None content_without_codeblocks = CODE_BLOCK_RE.sub("", text) - return bool(EVERYONE_PING_RE.search(content_without_codeblocks)) + return bool(EVERYONE_PING_RE.search(content_without_codeblocks)), None async def notify_member(self, filtered_member: Member, reason: str, channel: TextChannel) -> None: """ -- cgit v1.2.3 From 1a9d820638acce176f73867b6b321c8c1dbfb479 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 6 Feb 2021 13:38:39 +0200 Subject: Ignore attachment-only messages for duplicates antispam rule --- bot/rules/duplicates.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py index 455764b53..23aefd3dc 100644 --- a/bot/rules/duplicates.py +++ b/bot/rules/duplicates.py @@ -13,6 +13,7 @@ async def apply( if ( msg.author == last_message.author and msg.content == last_message.content + and (msg.content and not msg.attachments) ) ) -- cgit v1.2.3 From 84a46c9ab27f0a593c413f5ee09ba19cf5fb1d1b Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 6 Feb 2021 13:45:07 +0200 Subject: Lower max attachments per 10 seconds to 3 --- config-default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config-default.yml b/config-default.yml index d3b267159..d323a946d 100644 --- a/config-default.yml +++ b/config-default.yml @@ -367,7 +367,7 @@ anti_spam: rules: attachments: interval: 10 - max: 9 + max: 3 burst: interval: 10 -- cgit v1.2.3 From 898adc69661e06ee6d5bd0962d265bae5faed16c Mon Sep 17 00:00:00 2001 From: Harbys 
<44087388+Harbys@users.noreply.github.com> Date: Mon, 8 Feb 2021 19:40:07 +0100 Subject: change for style guidelines by @MarkKoz Co-authored-by: Mark --- bot/exts/moderation/stream.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 7e15864bf..6a889fe2f 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -60,8 +60,8 @@ class Stream(commands.Cog): # Schedule task to remove streaming permission from Member self.scheduler.schedule_at(duration, user.id, self._remove_streaming_permission(user)) await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") - await ctx.send(f"{Emojis.check_mark}{user.mention} can now stream until " - f"{format_infraction_with_duration(str(duration))}.") + duration = format_infraction_with_duration(str(duration)) + await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream until {duration}.") @commands.command(aliases=("unstream", )) @commands.has_any_role(*STAFF_ROLES) -- cgit v1.2.3 From 90bf065874be13b77b9390d4378d03a8272d113e Mon Sep 17 00:00:00 2001 From: Harbys Date: Mon, 8 Feb 2021 20:02:42 +0100 Subject: fix default duration --- bot/exts/moderation/stream.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 8b8308925..63bc2c218 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -28,8 +28,7 @@ class Stream(commands.Cog): self, ctx: commands.Context, user: discord.Member, - duration: Expiry = - datetime.datetime.utcnow() + datetime.timedelta(minutes=VideoPermission.default_permission_duration), + duration: Expiry = None, *_ ) -> None: """ @@ -47,6 +46,11 @@ class Stream(commands.Cog): Alternatively, an ISO 8601 timestamp can be provided for the duration. 
""" + # if duration is none then calculate default duration + if duration is None: + now = datetime.datetime.utcnow() + duration = now + datetime.timedelta(minutes=VideoPermission.default_permission_duration) + # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: -- cgit v1.2.3 From 16f8fd31b3cd321e4ac7d6eeb0ba20eeb8c78892 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers Date: Tue, 9 Feb 2021 10:53:51 +0100 Subject: Tiny grammar edit --- bot/resources/tags/empty-json.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index 3851dc142..ceb8c6eae 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -1,4 +1,4 @@ -When using JSON you might run into the following error: +When using JSON, you might run into the following error: ``` JSONDecodeError: Expecting value: line 1 column 1 (char 0) ``` @@ -6,6 +6,6 @@ This error could have appeared because you just created the JSON file and there Whilst having the data empty is no problem, the file itself may never be completely empty. -You most likely wanted to structure your JSON as a dictionary. For this change your JSON file to read `{}`. +You most likely wanted to structure your JSON as a dictionary. To do this, change your JSON to read `{}`. Different data types are also supported. 
If you wish to read more on these, please reffer to the following article: https://www.tutorialspoint.com/json/json_data_types.htm -- cgit v1.2.3 From 8d1a46c1866c12b719b991719c84a6c1d6f25bb4 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers <61157793+sebkuip@users.noreply.github.com> Date: Tue, 9 Feb 2021 11:08:06 +0100 Subject: A small typo Co-authored-by: Kieran Siek --- bot/resources/tags/empty-json.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index ceb8c6eae..21b0860c7 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -8,4 +8,4 @@ Whilst having the data empty is no problem, the file itself may never be complet You most likely wanted to structure your JSON as a dictionary. To do this, change your JSON to read `{}`. -Different data types are also supported. If you wish to read more on these, please reffer to the following article: https://www.tutorialspoint.com/json/json_data_types.htm +Different data types are also supported. If you wish to read more on these, please refer to the following article: https://www.tutorialspoint.com/json/json_data_types.htm -- cgit v1.2.3 From 2627bc98da2c71a6a10a6b7039522d1938c08552 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers <61157793+sebkuip@users.noreply.github.com> Date: Tue, 9 Feb 2021 11:29:09 +0100 Subject: Hyperlink URL Suggestion of @Numelor Co-authored-by: Numerlor <25886452+Numerlor@users.noreply.github.com> --- bot/resources/tags/empty-json.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index 21b0860c7..93e2cadba 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -8,4 +8,4 @@ Whilst having the data empty is no problem, the file itself may never be complet You most likely wanted to structure your JSON as a dictionary. To do this, change your JSON to read `{}`. 
-Different data types are also supported. If you wish to read more on these, please refer to the following article: https://www.tutorialspoint.com/json/json_data_types.htm +Different data types are also supported. If you wish to read more on these, please refer to [this article](https://www.tutorialspoint.com/json/json_data_types.htm). -- cgit v1.2.3 From 160bf89303436e3ba0ff566241a206a120a25d66 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Tue, 9 Feb 2021 13:37:28 +0300 Subject: Moves Off Topic Name Translator Breaks out the off topic name translation functionality into its own function. Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/converters.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index 0d9a519df..80ce99459 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -357,27 +357,38 @@ class Duration(DurationDelta): class OffTopicName(Converter): """A converter that ensures an added off-topic name is valid.""" + ALLOWED_CHARACTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-" + + @classmethod + def translate_name(cls, name: str, *, from_unicode: bool = True) -> str: + """ + Translates `name` into a format that is allowed in discord channel names. + + If `from_unicode` is True, the name is translated from a discord-safe format, back to normalized text. 
+ """ + if from_unicode: + table = str.maketrans(cls.ALLOWED_CHARACTERS, '𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-') + else: + table = str.maketrans('𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-', cls.ALLOWED_CHARACTERS) + + return name.translate(table) + async def convert(self, ctx: Context, argument: str) -> str: """Attempt to replace any invalid characters with their approximate Unicode equivalent.""" - allowed_characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-" - # Chain multiple words to a single one argument = "-".join(argument.split()) if not (2 <= len(argument) <= 96): raise BadArgument("Channel name must be between 2 and 96 chars long") - elif not all(c.isalnum() or c in allowed_characters for c in argument): + elif not all(c.isalnum() or c in self.ALLOWED_CHARACTERS for c in argument): raise BadArgument( "Channel name must only consist of " "alphanumeric characters, minus signs or apostrophes." ) # Replace invalid characters with unicode alternatives. - table = str.maketrans( - allowed_characters, '𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-' - ) - return argument.translate(table) + return self.translate_name(argument) class ISODateTime(Converter): -- cgit v1.2.3 From 66cda4fd2a0b26e2f9e983f1597a15bfb9527143 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Tue, 9 Feb 2021 13:38:12 +0300 Subject: Makes Off Topic Name Search Case Insensitive Modifies the off topic channel name search to match upper and lower cased letters, as well as punctuation. 
Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/exts/fun/off_topic_names.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/bot/exts/fun/off_topic_names.py b/bot/exts/fun/off_topic_names.py index 7fc93b88c..845b8175c 100644 --- a/bot/exts/fun/off_topic_names.py +++ b/bot/exts/fun/off_topic_names.py @@ -139,10 +139,20 @@ class OffTopicNames(Cog): @has_any_role(*MODERATION_ROLES) async def search_command(self, ctx: Context, *, query: OffTopicName) -> None: """Search for an off-topic name.""" - result = await self.bot.api_client.get('bot/off-topic-channel-names') - in_matches = {name for name in result if query in name} - close_matches = difflib.get_close_matches(query, result, n=10, cutoff=0.70) - lines = sorted(f"• {name}" for name in in_matches.union(close_matches)) + query = OffTopicName.translate_name(query, from_unicode=False).lower() + + # Map normalized names to returned names for search purposes + result = { + OffTopicName.translate_name(name, from_unicode=False).lower(): name + for name in await self.bot.api_client.get('bot/off-topic-channel-names') + } + + # Search normalized keys + in_matches = {name for name in result.keys() if query in name} + close_matches = difflib.get_close_matches(query, result.keys(), n=10, cutoff=0.70) + + # Send Results + lines = sorted(f"• {result[name]}" for name in in_matches.union(close_matches)) embed = Embed( title="Query results", colour=Colour.blue() -- cgit v1.2.3 From 6004b090beede1ad28e2412cdcb41a629a4077e2 Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 10 Feb 2021 21:01:03 +0100 Subject: Add redis rescheduling --- bot/exts/moderation/stream.py | 37 ++++++++++++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 63bc2c218..af0633bdb 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,10 +1,11 @@ import 
datetime import discord +from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Emojis, Roles, STAFF_ROLES, VideoPermission +from bot.constants import Emojis, Guild, Roles, STAFF_ROLES, VideoPermission from bot.converters import Expiry from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration @@ -13,15 +14,36 @@ from bot.utils.time import format_infraction_with_duration class Stream(commands.Cog): """Grant and revoke streaming permissions from users.""" + # Stores tasks to remove streaming permission + # User id : timestamp relation + task_cache = RedisCache() + def __init__(self, bot: Bot): self.bot = bot self.scheduler = Scheduler(self.__class__.__name__) + self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis()) - @staticmethod - async def _remove_streaming_permission(schedule_user: discord.Member) -> None: + async def _remove_streaming_permission(self, schedule_user: discord.Member) -> None: """Remove streaming permission from Member.""" + await self._delete_from_redis(schedule_user.id) await schedule_user.remove_roles(discord.Object(Roles.video), reason="Temporary streaming access revoked") + async def _add_to_redis_cache(self, user_id: int, timestamp: float) -> None: + """Adds 'task' to redis cache.""" + await self.task_cache.set(user_id, timestamp) + + async def _reload_tasks_from_redis(self) -> None: + await self.bot.wait_until_guild_available() + items = await self.task_cache.items() + for key, value in items: + member = await self.bot.get_guild(Guild.id).fetch_member(key) + self.scheduler.schedule_at(datetime.datetime.utcfromtimestamp(value), + key, + self._remove_streaming_permission(member)) + + async def _delete_from_redis(self, key: str) -> None: + await self.task_cache.delete(key) + @commands.command(aliases=("streaming",)) @commands.has_any_role(*STAFF_ROLES) async def stream( @@ -57,8 +79,9 @@ class 
Stream(commands.Cog): await ctx.send(f"{Emojis.cross_mark} This user can already stream.") return - # Schedule task to remove streaming permission from Member + # Schedule task to remove streaming permission from Member and add it to task cache self.scheduler.schedule_at(duration, user.id, self._remove_streaming_permission(user)) + await self._add_to_redis_cache(user.id, duration.timestamp()) await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") duration = format_infraction_with_duration(str(duration)) await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream until {duration}.") @@ -77,11 +100,15 @@ class Stream(commands.Cog): # Cancel scheduled task to take away streaming permission to avoid errors if user.id in self.scheduler: self.scheduler.cancel(user.id) - await user.remove_roles(discord.Object(Roles.video)) + await self._remove_streaming_permission(user) await ctx.send(f"{Emojis.check_mark} Streaming permission taken from {user.display_name}.") else: await ctx.send(f"{Emojis.cross_mark} This user already can't stream.") + def cog_unload(self) -> None: + """Cache and cancel all scheduled tasks.""" + self.scheduler.cancel_all() + def setup(bot: Bot) -> None: """Loads the Stream cog.""" -- cgit v1.2.3 From fb4484ee23a00f0660765675782f1bf44812ca4e Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 10 Feb 2021 21:32:41 +0100 Subject: Add permanent streaming command --- bot/exts/moderation/stream.py | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index af0633bdb..46c68f74b 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -26,7 +26,7 @@ class Stream(commands.Cog): async def _remove_streaming_permission(self, schedule_user: discord.Member) -> None: """Remove streaming permission from Member.""" await self._delete_from_redis(schedule_user.id) - await 
schedule_user.remove_roles(discord.Object(Roles.video), reason="Temporary streaming access revoked") + await schedule_user.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") async def _add_to_redis_cache(self, user_id: int, timestamp: float) -> None: """Adds 'task' to redis cache.""" @@ -86,6 +86,29 @@ class Stream(commands.Cog): duration = format_infraction_with_duration(str(duration)) await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream until {duration}.") + @commands.command(aliases=("pstream",)) + @commands.has_any_role(*STAFF_ROLES) + async def permanentstream( + self, + ctx: commands.Context, + user: discord.Member, + *_ + ) -> None: + """Permanently give user a streaming permission.""" + # Check if user already has streaming permission + already_allowed = any(Roles.video == role.id for role in user.roles) + if already_allowed: + if user.id in self.scheduler: + self.scheduler.cancel(user.id) + await self._delete_from_redis(user.id) + await ctx.send(f"{Emojis.check_mark} Moved temporary permission to permanent") + return + await ctx.send(f"{Emojis.cross_mark} This user can already stream.") + return + + await user.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") + await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream forever") + @commands.command(aliases=("unstream", )) @commands.has_any_role(*STAFF_ROLES) async def revokestream( -- cgit v1.2.3 From 3b2a509085b0ae79d6ca428960f7b6b82d94be18 Mon Sep 17 00:00:00 2001 From: Harbys Date: Wed, 10 Feb 2021 21:50:18 +0100 Subject: Fix cancelling tasks on cog unload --- bot/exts/moderation/stream.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 46c68f74b..c61599278 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -129,8 +129,9 @@ class Stream(commands.Cog): await ctx.send(f"{Emojis.cross_mark} This user 
already can't stream.") def cog_unload(self) -> None: - """Cache and cancel all scheduled tasks.""" - self.scheduler.cancel_all() + """Cancel all scheduled tasks.""" + self.reload_task.cancel() + self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all()) def setup(bot: Bot) -> None: -- cgit v1.2.3 From 578bd933ca4b954131f25646e69748cc3d748d0b Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 06:39:16 +0200 Subject: Removed enabling and switched to redis Removing self.enable and the defon & defoff commands. Defcon will now just be always 'on' and we can set the days threshold to 0 to turn it off. Switched from postgres to redis - if the data gets lost we should just reconfigure defcon again, it should not depend on the site. --- bot/constants.py | 12 +-- bot/exts/moderation/defcon.py | 173 +++++++++++++----------------------------- config-default.yml | 12 +-- 3 files changed, 65 insertions(+), 132 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 95e22513f..cbab751d0 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -277,9 +277,9 @@ class Emojis(metaclass=YAMLGetter): badge_staff: str badge_verified_bot_developer: str - defcon_disabled: str # noqa: E704 - defcon_enabled: str # noqa: E704 - defcon_updated: str # noqa: E704 + defcon_shutdown: str # noqa: E704 + defcon_unshutdown: str # noqa: E704 + defcon_update: str # noqa: E704 failmail: str @@ -316,9 +316,9 @@ class Icons(metaclass=YAMLGetter): crown_red: str defcon_denied: str # noqa: E704 - defcon_disabled: str # noqa: E704 - defcon_enabled: str # noqa: E704 - defcon_updated: str # noqa: E704 + defcon_shutdown: str # noqa: E704 + defcon_unshutdown: str # noqa: E704 + defcon_update: str # noqa: E704 filtering: str diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index e0baab099..8e6ab1fd5 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -6,6 +6,7 @@ from datetime import datetime, timedelta from enum import 
Enum from gettext import ngettext +from async_rediscache import RedisCache from discord import Colour, Embed, Member from discord.ext import tasks from discord.ext.commands import Cog, Context, group, has_any_role @@ -36,67 +37,59 @@ class Action(Enum): ActionInfo = namedtuple('LogInfoDetails', ['icon', 'color', 'template']) - ENABLED = ActionInfo(Icons.defcon_enabled, Colours.soft_green, "**Days:** {days}\n\n") - DISABLED = ActionInfo(Icons.defcon_disabled, Colours.soft_red, "") - UPDATED = ActionInfo(Icons.defcon_updated, Colour.blurple(), "**Days:** {days}\n\n") + SERVER_OPEN = ActionInfo(Icons.defcon_unshutdown, Colours.soft_green, "") + SERVER_SHUTDOWN = ActionInfo(Icons.defcon_shutdown, Colours.soft_red, "") + DURATION_UPDATE = ActionInfo(Icons.defcon_update, Colour.blurple(), "**Days:** {days}\n\n") class Defcon(Cog): """Time-sensitive server defense mechanisms.""" + redis_cache = RedisCache() + def __init__(self, bot: Bot): self.bot = bot self.channel = None self.days = timedelta(days=0) - self.enabled = False + self.expiry = None - self.bot.loop.create_task(self.sync_settings()) + self.bot.loop.create_task(self._sync_settings()) @property def mod_log(self) -> ModLog: """Get currently loaded ModLog cog instance.""" return self.bot.get_cog("ModLog") - async def sync_settings(self) -> None: + @redis_cache.atomic_transaction + async def _sync_settings(self) -> None: """On cog load, try to synchronize DEFCON settings to the API.""" log.trace("Waiting for the guild to become available before syncing.") await self.bot.wait_until_guild_available() + self.channel = await self.bot.fetch_channel(Channels.defcon) log.trace("Syncing settings.") - self.channel = await self.bot.fetch_channel(Channels.defcon) try: - response = await self.bot.api_client.get('bot/bot-settings/defcon') - data = response['data'] - - except Exception: # Yikes! 
+ settings = await self.redis_cache.to_dict() + self.days = timedelta(days=settings["days"]) + except Exception: log.exception("Unable to get DEFCON settings!") - await self.bot.get_channel(Channels.dev_log).send( - f"<@&{Roles.admins}> **WARNING**: Unable to get DEFCON settings!" - ) + await self.channel.send(f"<@&{Roles.moderators}> **WARNING**: Unable to get DEFCON settings!") else: - if data["enabled"]: - self.enabled = True - self.days = timedelta(days=data["days"]) - log.info(f"DEFCON enabled: {self.days.days} days") - - else: - self.enabled = False - self.days = timedelta(days=0) - log.info("DEFCON disabled") + self._update_notifier() + log.info(f"DEFCON synchronized: {self.days.days} days") - self.update_notifier() - await self.update_channel_topic() + await self._update_channel_topic() @Cog.listener() async def on_member_join(self, member: Member) -> None: - """If DEFCON is enabled, check newly joining users to see if they meet the account age threshold.""" - if self.enabled and self.days.days > 0: + """Check newly joining users to see if they meet the account age threshold.""" + if self.days.days > 0: now = datetime.utcnow() if now - member.created_at < self.days: - log.info(f"Rejecting user {member}: Account is too new and DEFCON is enabled") + log.info(f"Rejecting user {member}: Account is too new") message_sent = False @@ -104,7 +97,7 @@ class Defcon(Cog): await member.send(REJECTION_MESSAGE.format(user=member.mention)) message_sent = True - except Exception: + except Exception: # TODO log.exception(f"Unable to send rejection message to user: {member}") await member.kick(reason="DEFCON active, user is too new") @@ -128,118 +121,64 @@ class Defcon(Cog): """Check the DEFCON status or run a subcommand.""" await ctx.send_help(ctx.command) + @redis_cache.atomic_transaction async def _defcon_action(self, ctx: Context, days: int, action: Action) -> None: """Providing a structured way to do an defcon action.""" - try: - response = await 
self.bot.api_client.get('bot/bot-settings/defcon') - data = response['data'] - - if "enable_date" in data and action is Action.DISABLED: - enabled = datetime.fromisoformat(data["enable_date"]) - - delta = datetime.now() - enabled - - self.bot.stats.timing("defcon.enabled", delta) - except Exception: - pass - self.days = timedelta(days=days) - self.enabled = action != Action.DISABLED - error = None - try: - await self.bot.api_client.put( - 'bot/bot-settings/defcon', - json={ - 'name': 'defcon', - 'data': { - # TODO: retrieve old days count - 'days': self.days.days, - 'enabled': self.enabled, - 'enable_date': datetime.now().isoformat() - } - } - ) - - except Exception as err: - log.exception("Unable to update DEFCON settings.") - error = err - finally: - self.update_notifier() - - await ctx.send(self.build_defcon_msg(action, error)) - await self.send_defcon_log(action, ctx.author, error) - await self.update_channel_topic() - - self.bot.stats.gauge("defcon.threshold", days) + await self.redis_cache.update( + { + 'days': self.days.days, + } + ) + self._update_notifier() - @defcon_group.command(name='enable', aliases=('on', 'e'), root_aliases=("defon",)) - @has_any_role(*MODERATION_ROLES) - async def enable_command(self, ctx: Context) -> None: - """ - Enable DEFCON mode. Useful in a pinch, but be sure you know what you're doing! + await ctx.send(self._build_defcon_msg(action)) + await self._send_defcon_log(action, ctx.author) + await self._update_channel_topic() - Currently, this just adds an account age requirement. Use !defcon days to set how old an account must be, - in days. - """ - await self._defcon_action(ctx, days=self.days, action=Action.ENABLED) + self.bot.stats.gauge("defcon.threshold", days) - @defcon_group.command(name='disable', aliases=('off', 'd'), root_aliases=("defoff",)) + @defcon_group.command(aliases=('s',)) @has_any_role(*MODERATION_ROLES) - async def disable_command(self, ctx: Context) -> None: - """Disable DEFCON mode. 
Useful in a pinch, but be sure you know what you're doing!""" - await self._defcon_action(ctx, days=0, action=Action.DISABLED) - - @defcon_group.command(name='status', aliases=('s',)) - @has_any_role(*MODERATION_ROLES) - async def status_command(self, ctx: Context) -> None: + async def status(self, ctx: Context) -> None: """Check the current status of DEFCON mode.""" embed = Embed( colour=Colour.blurple(), title="DEFCON Status", - description=f"**Enabled:** {self.enabled}\n" - f"**Days:** {self.days.days}" + description=f"**Days:** {self.days.days}" ) await ctx.send(embed=embed) - @defcon_group.command(name='days') + @defcon_group.command(aliases=('d',)) @has_any_role(*MODERATION_ROLES) - async def days_command(self, ctx: Context, days: int) -> None: - """Set how old an account must be to join the server, in days, with DEFCON mode enabled.""" - await self._defcon_action(ctx, days=days, action=Action.UPDATED) + async def days(self, ctx: Context, days: int) -> None: + """Set how old an account must be to join the server, in days.""" + await self._defcon_action(ctx, days=days, action=Action.DURATION_UPDATE) - async def update_channel_topic(self) -> None: + async def _update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" - if self.enabled: - day_str = "days" if self.days.days > 1 else "day" - new_topic = f"{BASE_CHANNEL_TOPIC}\n(Status: Enabled, Threshold: {self.days.days} {day_str})" - else: - new_topic = f"{BASE_CHANNEL_TOPIC}\n(Status: Disabled)" + day_str = "days" if self.days.days > 1 else "day" + new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {self.days.days} {day_str})" self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) await self.channel.edit(topic=new_topic) - def build_defcon_msg(self, action: Action, e: Exception = None) -> str: + def _build_defcon_msg(self, action: Action) -> str: """Build in-channel response string for DEFCON action.""" - if action is Action.ENABLED: - msg = 
f"{Emojis.defcon_enabled} DEFCON enabled.\n\n" - elif action is Action.DISABLED: - msg = f"{Emojis.defcon_disabled} DEFCON disabled.\n\n" - elif action is Action.UPDATED: + if action is Action.SERVER_OPEN: + msg = f"{Emojis.defcon_enabled} Server reopened.\n\n" + elif action is Action.SERVER_SHUTDOWN: + msg = f"{Emojis.defcon_disabled} Server shut down.\n\n" + elif action is Action.DURATION_UPDATE: msg = ( - f"{Emojis.defcon_updated} DEFCON days updated; accounts must be {self.days.days} " - f"day{'s' if self.days.days > 1 else ''} old to join the server.\n\n" - ) - - if e: - msg += ( - "**There was a problem updating the site** - This setting may be reverted when the bot restarts.\n\n" - f"```py\n{e}\n```" + f"{Emojis.defcon_update} DEFCON days updated; accounts must be {self.days.days} " + f"day{ngettext('', 's', self.days.days)} old to join the server.\n\n" ) return msg - async def send_defcon_log(self, action: Action, actor: Member, e: Exception = None) -> None: + async def _send_defcon_log(self, action: Action, actor: Member) -> None: """Send log message for DEFCON action.""" info = action.value log_msg: str = ( @@ -248,15 +187,9 @@ class Defcon(Cog): ) status_msg = f"DEFCON {action.name.lower()}" - if e: - log_msg += ( - "**There was a problem updating the site** - This setting may be reverted when the bot restarts.\n\n" - f"```py\n{e}\n```" - ) - await self.mod_log.send_log_message(info.icon, info.color, status_msg, log_msg) - def update_notifier(self) -> None: + def _update_notifier(self) -> None: """Start or stop the notifier according to the DEFCON status.""" if self.days.days != 0 and not self.defcon_notifier.is_running(): log.info("DEFCON notifier started.") diff --git a/config-default.yml b/config-default.yml index d3b267159..a37743c15 100644 --- a/config-default.yml +++ b/config-default.yml @@ -44,9 +44,9 @@ style: badge_staff: "<:discord_staff:743882896498098226>" badge_verified_bot_developer: "<:verified_bot_dev:743882897299210310>" - 
defcon_disabled: "<:defcondisabled:470326273952972810>" - defcon_enabled: "<:defconenabled:470326274213150730>" - defcon_updated: "<:defconsettingsupdated:470326274082996224>" + defcon_shutdown: "<:defcondisabled:470326273952972810>" + defcon_unshutdown: "<:defconenabled:470326274213150730>" + defcon_update: "<:defconsettingsupdated:470326274082996224>" failmail: "<:failmail:633660039931887616>" @@ -80,9 +80,9 @@ style: crown_red: "https://cdn.discordapp.com/emojis/469964154879344640.png" defcon_denied: "https://cdn.discordapp.com/emojis/472475292078964738.png" - defcon_disabled: "https://cdn.discordapp.com/emojis/470326273952972810.png" - defcon_enabled: "https://cdn.discordapp.com/emojis/470326274213150730.png" - defcon_updated: "https://cdn.discordapp.com/emojis/472472638342561793.png" + defcon_shutdown: "https://cdn.discordapp.com/emojis/470326273952972810.png" + defcon_unshutdown: "https://cdn.discordapp.com/emojis/470326274213150730.png" + defcon_update: "https://cdn.discordapp.com/emojis/472472638342561793.png" filtering: "https://cdn.discordapp.com/emojis/472472638594482195.png" -- cgit v1.2.3 From 6435646ef04e72528c9cba4ae04f29d662877573 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 06:53:00 +0200 Subject: Reordered methods --- bot/exts/moderation/defcon.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 8e6ab1fd5..355843bc8 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -121,24 +121,6 @@ class Defcon(Cog): """Check the DEFCON status or run a subcommand.""" await ctx.send_help(ctx.command) - @redis_cache.atomic_transaction - async def _defcon_action(self, ctx: Context, days: int, action: Action) -> None: - """Providing a structured way to do an defcon action.""" - self.days = timedelta(days=days) - - await self.redis_cache.update( - { - 'days': self.days.days, - } - ) - 
self._update_notifier() - - await ctx.send(self._build_defcon_msg(action)) - await self._send_defcon_log(action, ctx.author) - await self._update_channel_topic() - - self.bot.stats.gauge("defcon.threshold", days) - @defcon_group.command(aliases=('s',)) @has_any_role(*MODERATION_ROLES) async def status(self, ctx: Context) -> None: @@ -164,6 +146,24 @@ class Defcon(Cog): self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) await self.channel.edit(topic=new_topic) + @redis_cache.atomic_transaction + async def _defcon_action(self, ctx: Context, days: int, action: Action) -> None: + """Providing a structured way to do an defcon action.""" + self.days = timedelta(days=days) + + await self.redis_cache.update( + { + 'days': self.days.days, + } + ) + self._update_notifier() + + await ctx.send(self._build_defcon_msg(action)) + await self._send_defcon_log(action, ctx.author) + await self._update_channel_topic() + + self.bot.stats.gauge("defcon.threshold", days) + def _build_defcon_msg(self, action: Action) -> str: """Build in-channel response string for DEFCON action.""" if action is Action.SERVER_OPEN: -- cgit v1.2.3 From a2eaa58ff5bb5876e53c31e5efb979aff71c4745 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 07:47:31 +0200 Subject: Added server shutdown and reopen commands --- bot/exts/moderation/defcon.py | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 355843bc8..4aed24559 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -138,6 +138,28 @@ class Defcon(Cog): """Set how old an account must be to join the server, in days.""" await self._defcon_action(ctx, days=days, action=Action.DURATION_UPDATE) + @defcon_group.command() + @has_any_role(*MODERATION_ROLES) + async def shutdown(self, ctx: Context) -> None: + """Shut down the server by setting send permissions of everyone to False.""" + role = 
ctx.guild.default_role + permissions = role.permissions + + permissions.update(send_messages=False, add_reactions=False) + await role.edit(reason="DEFCON shutdown", permissions=permissions) + await ctx.send(self._build_defcon_msg(Action.SERVER_SHUTDOWN)) + + @defcon_group.command() + @has_any_role(*MODERATION_ROLES) + async def unshutdown(self, ctx: Context) -> None: + """Open up the server again by setting send permissions of everyone to None.""" + role = ctx.guild.default_role + permissions = role.permissions + + permissions.update(send_messages=True, add_reactions=True) + await role.edit(reason="DEFCON unshutdown", permissions=permissions) + await ctx.send(self._build_defcon_msg(Action.SERVER_OPEN)) + async def _update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" day_str = "days" if self.days.days > 1 else "day" @@ -167,9 +189,9 @@ class Defcon(Cog): def _build_defcon_msg(self, action: Action) -> str: """Build in-channel response string for DEFCON action.""" if action is Action.SERVER_OPEN: - msg = f"{Emojis.defcon_enabled} Server reopened.\n\n" + msg = f"{Emojis.defcon_unshutdown} Server reopened.\n\n" elif action is Action.SERVER_SHUTDOWN: - msg = f"{Emojis.defcon_disabled} Server shut down.\n\n" + msg = f"{Emojis.defcon_shutdown} Server shut down.\n\n" elif action is Action.DURATION_UPDATE: msg = ( f"{Emojis.defcon_update} DEFCON days updated; accounts must be {self.days.days} " -- cgit v1.2.3 From 72f258c107c3b577298c5e131897cb93790c67c4 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 08:08:23 +0200 Subject: Removed _build_defcon_message method --- bot/exts/moderation/defcon.py | 39 +++++++++++++++------------------------ 1 file changed, 15 insertions(+), 24 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 4aed24559..b04752abd 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -35,11 +35,11 @@ BASE_CHANNEL_TOPIC = 
"Python Discord Defense Mechanism" class Action(Enum): """Defcon Action.""" - ActionInfo = namedtuple('LogInfoDetails', ['icon', 'color', 'template']) + ActionInfo = namedtuple('LogInfoDetails', ['icon', 'emoji', 'color', 'template']) - SERVER_OPEN = ActionInfo(Icons.defcon_unshutdown, Colours.soft_green, "") - SERVER_SHUTDOWN = ActionInfo(Icons.defcon_shutdown, Colours.soft_red, "") - DURATION_UPDATE = ActionInfo(Icons.defcon_update, Colour.blurple(), "**Days:** {days}\n\n") + SERVER_OPEN = ActionInfo(Icons.defcon_unshutdown, Emojis.defcon_unshutdown, Colours.soft_green, "") + SERVER_SHUTDOWN = ActionInfo(Icons.defcon_shutdown, Emojis.defcon_shutdown, Colours.soft_red, "") + DURATION_UPDATE = ActionInfo(Icons.defcon_update, Emojis.defcon_update, Colour.blurple(), "**Days:** {days}\n\n") class Defcon(Cog): @@ -136,7 +136,7 @@ class Defcon(Cog): @has_any_role(*MODERATION_ROLES) async def days(self, ctx: Context, days: int) -> None: """Set how old an account must be to join the server, in days.""" - await self._defcon_action(ctx, days=days, action=Action.DURATION_UPDATE) + await self._defcon_action(ctx, days=days) @defcon_group.command() @has_any_role(*MODERATION_ROLES) @@ -147,7 +147,7 @@ class Defcon(Cog): permissions.update(send_messages=False, add_reactions=False) await role.edit(reason="DEFCON shutdown", permissions=permissions) - await ctx.send(self._build_defcon_msg(Action.SERVER_SHUTDOWN)) + await ctx.send(f"{Action.SERVER_SHUTDOWN.value.emoji} Server shut down.") @defcon_group.command() @has_any_role(*MODERATION_ROLES) @@ -158,7 +158,7 @@ class Defcon(Cog): permissions.update(send_messages=True, add_reactions=True) await role.edit(reason="DEFCON unshutdown", permissions=permissions) - await ctx.send(self._build_defcon_msg(Action.SERVER_OPEN)) + await ctx.send(f"{Action.SERVER_OPEN.value.emoji} Server reopened.") async def _update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" @@ -169,8 +169,8 @@ class 
Defcon(Cog): await self.channel.edit(topic=new_topic) @redis_cache.atomic_transaction - async def _defcon_action(self, ctx: Context, days: int, action: Action) -> None: - """Providing a structured way to do an defcon action.""" + async def _defcon_action(self, ctx: Context, days: int) -> None: + """Providing a structured way to do a defcon action.""" self.days = timedelta(days=days) await self.redis_cache.update( @@ -180,26 +180,17 @@ class Defcon(Cog): ) self._update_notifier() - await ctx.send(self._build_defcon_msg(action)) + action = Action.DURATION_UPDATE + + await ctx.send( + f"{action.value.emoji} DEFCON days updated; accounts must be {self.days.days} " + f"day{ngettext('', 's', self.days.days)} old to join the server." + ) await self._send_defcon_log(action, ctx.author) await self._update_channel_topic() self.bot.stats.gauge("defcon.threshold", days) - def _build_defcon_msg(self, action: Action) -> str: - """Build in-channel response string for DEFCON action.""" - if action is Action.SERVER_OPEN: - msg = f"{Emojis.defcon_unshutdown} Server reopened.\n\n" - elif action is Action.SERVER_SHUTDOWN: - msg = f"{Emojis.defcon_shutdown} Server shut down.\n\n" - elif action is Action.DURATION_UPDATE: - msg = ( - f"{Emojis.defcon_update} DEFCON days updated; accounts must be {self.days.days} " - f"day{ngettext('', 's', self.days.days)} old to join the server.\n\n" - ) - - return msg - async def _send_defcon_log(self, action: Action, actor: Member) -> None: """Send log message for DEFCON action.""" info = action.value -- cgit v1.2.3 From 2016dceff88642b92564e8f0c8ec98db0cbedf29 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 09:26:33 +0200 Subject: Added cog check to only allow mods in the defcon channel --- bot/exts/moderation/defcon.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index b04752abd..a5af1141f 100644 --- a/bot/exts/moderation/defcon.py +++ 
b/bot/exts/moderation/defcon.py @@ -116,13 +116,11 @@ class Defcon(Cog): ) @group(name='defcon', aliases=('dc',), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) async def defcon_group(self, ctx: Context) -> None: """Check the DEFCON status or run a subcommand.""" await ctx.send_help(ctx.command) @defcon_group.command(aliases=('s',)) - @has_any_role(*MODERATION_ROLES) async def status(self, ctx: Context) -> None: """Check the current status of DEFCON mode.""" embed = Embed( @@ -133,13 +131,11 @@ class Defcon(Cog): await ctx.send(embed=embed) @defcon_group.command(aliases=('d',)) - @has_any_role(*MODERATION_ROLES) async def days(self, ctx: Context, days: int) -> None: """Set how old an account must be to join the server, in days.""" await self._defcon_action(ctx, days=days) @defcon_group.command() - @has_any_role(*MODERATION_ROLES) async def shutdown(self, ctx: Context) -> None: """Shut down the server by setting send permissions of everyone to False.""" role = ctx.guild.default_role @@ -150,7 +146,6 @@ class Defcon(Cog): await ctx.send(f"{Action.SERVER_SHUTDOWN.value.emoji} Server shut down.") @defcon_group.command() - @has_any_role(*MODERATION_ROLES) async def unshutdown(self, ctx: Context) -> None: """Open up the server again by setting send permissions of everyone to None.""" role = ctx.guild.default_role @@ -217,6 +212,10 @@ class Defcon(Cog): """Routinely notify moderators that DEFCON is active.""" await self.channel.send(f"Defcon is on and is set to {self.days.days} day{ngettext('', 's', self.days.days)}.") + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators in the defcon channel to run commands in this cog.""" + return has_any_role(*MODERATION_ROLES).predicate(ctx) and ctx.channel == self.channel + def cog_unload(self) -> None: """Cancel the notifer task when the cog unloads.""" log.trace("Cog unload: canceling defcon notifier task.") -- cgit v1.2.3 From d99f7d88f4718ae8042b22788c6ec85541219ae7 Mon Sep 17 00:00:00 
2001 From: mbaruh Date: Sat, 13 Feb 2021 12:19:01 +0200 Subject: Defcon days is now defcon threshold with DurationDelta --- bot/converters.py | 17 ++-------- bot/exts/moderation/defcon.py | 72 +++++++++++++++++++++++++++---------------- bot/utils/time.py | 36 ++++++++++++++++++++++ 3 files changed, 84 insertions(+), 41 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index d0a9731d6..483272de1 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -16,6 +16,7 @@ from discord.utils import DISCORD_EPOCH, snowflake_time from bot.api import ResponseCodeError from bot.constants import URLs from bot.utils.regex import INVITE_RE +from bot.utils.time import parse_duration_string log = logging.getLogger(__name__) @@ -301,16 +302,6 @@ class TagContentConverter(Converter): class DurationDelta(Converter): """Convert duration strings into dateutil.relativedelta.relativedelta objects.""" - duration_parser = re.compile( - r"((?P\d+?) ?(years|year|Y|y) ?)?" - r"((?P\d+?) ?(months|month|m) ?)?" - r"((?P\d+?) ?(weeks|week|W|w) ?)?" - r"((?P\d+?) ?(days|day|D|d) ?)?" - r"((?P\d+?) ?(hours|hour|H|h) ?)?" - r"((?P\d+?) ?(minutes|minute|M) ?)?" - r"((?P\d+?) ?(seconds|second|S|s))?" - ) - async def convert(self, ctx: Context, duration: str) -> relativedelta: """ Converts a `duration` string to a relativedelta object. @@ -326,13 +317,9 @@ class DurationDelta(Converter): The units need to be provided in descending order of magnitude. 
""" - match = self.duration_parser.fullmatch(duration) - if not match: + if not (delta := parse_duration_string(duration)): raise BadArgument(f"`{duration}` is not a valid duration string.") - duration_dict = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()} - delta = relativedelta(**duration_dict) - return delta diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index a5af1141f..82aaf5714 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -2,19 +2,22 @@ from __future__ import annotations import logging from collections import namedtuple -from datetime import datetime, timedelta +from datetime import datetime from enum import Enum -from gettext import ngettext +from typing import Union from async_rediscache import RedisCache +from dateutil.relativedelta import relativedelta from discord import Colour, Embed, Member from discord.ext import tasks from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles +from bot.converters import DurationDelta from bot.exts.moderation.modlog import ModLog from bot.utils.messages import format_user +from bot.utils.time import humanize_delta, parse_duration_string log = logging.getLogger(__name__) @@ -31,6 +34,8 @@ will be resolved soon. 
In the meantime, please feel free to peruse the resources BASE_CHANNEL_TOPIC = "Python Discord Defense Mechanism" +SECONDS_IN_DAY = 86400 + class Action(Enum): """Defcon Action.""" @@ -39,7 +44,9 @@ class Action(Enum): SERVER_OPEN = ActionInfo(Icons.defcon_unshutdown, Emojis.defcon_unshutdown, Colours.soft_green, "") SERVER_SHUTDOWN = ActionInfo(Icons.defcon_shutdown, Emojis.defcon_shutdown, Colours.soft_red, "") - DURATION_UPDATE = ActionInfo(Icons.defcon_update, Emojis.defcon_update, Colour.blurple(), "**Days:** {days}\n\n") + DURATION_UPDATE = ActionInfo( + Icons.defcon_update, Emojis.defcon_update, Colour.blurple(), "**Threshold:** {threshold}\n\n" + ) class Defcon(Cog): @@ -50,7 +57,7 @@ class Defcon(Cog): def __init__(self, bot: Bot): self.bot = bot self.channel = None - self.days = timedelta(days=0) + self.threshold = relativedelta(days=0) self.expiry = None self.bot.loop.create_task(self._sync_settings()) @@ -71,24 +78,24 @@ class Defcon(Cog): try: settings = await self.redis_cache.to_dict() - self.days = timedelta(days=settings["days"]) + self.threshold = parse_duration_string(settings["threshold"]) except Exception: log.exception("Unable to get DEFCON settings!") await self.channel.send(f"<@&{Roles.moderators}> **WARNING**: Unable to get DEFCON settings!") else: self._update_notifier() - log.info(f"DEFCON synchronized: {self.days.days} days") + log.info(f"DEFCON synchronized: {humanize_delta(self.threshold)}") await self._update_channel_topic() @Cog.listener() async def on_member_join(self, member: Member) -> None: """Check newly joining users to see if they meet the account age threshold.""" - if self.days.days > 0: + if self.threshold > relativedelta(days=0): now = datetime.utcnow() - if now - member.created_at < self.days: + if now - member.created_at < self.threshold: log.info(f"Rejecting user {member}: Account is too new") message_sent = False @@ -125,15 +132,17 @@ class Defcon(Cog): """Check the current status of DEFCON mode.""" embed = Embed( 
colour=Colour.blurple(), title="DEFCON Status", - description=f"**Days:** {self.days.days}" + description=f"**Threshold:** {humanize_delta(self.threshold)}" ) await ctx.send(embed=embed) - @defcon_group.command(aliases=('d',)) - async def days(self, ctx: Context, days: int) -> None: - """Set how old an account must be to join the server, in days.""" - await self._defcon_action(ctx, days=days) + @defcon_group.command(aliases=('t',)) + async def threshold(self, ctx: Context, threshold: Union[DurationDelta, int]) -> None: + """Set how old an account must be to join the server.""" + if isinstance(threshold, int): + threshold = relativedelta(days=threshold) + await self._defcon_action(ctx, threshold=threshold) @defcon_group.command() async def shutdown(self, ctx: Context) -> None: @@ -157,20 +166,19 @@ class Defcon(Cog): async def _update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" - day_str = "days" if self.days.days > 1 else "day" - new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {self.days.days} {day_str})" + new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold)})" self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) await self.channel.edit(topic=new_topic) @redis_cache.atomic_transaction - async def _defcon_action(self, ctx: Context, days: int) -> None: + async def _defcon_action(self, ctx: Context, threshold: relativedelta) -> None: """Providing a structured way to do a defcon action.""" - self.days = timedelta(days=days) + self.threshold = threshold await self.redis_cache.update( { - 'days': self.days.days, + 'threshold': Defcon._stringify_relativedelta(self.threshold), } ) self._update_notifier() @@ -178,20 +186,32 @@ class Defcon(Cog): action = Action.DURATION_UPDATE await ctx.send( - f"{action.value.emoji} DEFCON days updated; accounts must be {self.days.days} " - f"day{ngettext('', 's', self.days.days)} old to join the server." 
+ f"{action.value.emoji} DEFCON threshold updated; accounts must be " + f"{humanize_delta(self.threshold)} old to join the server." ) await self._send_defcon_log(action, ctx.author) await self._update_channel_topic() - self.bot.stats.gauge("defcon.threshold", days) + self._log_threshold_stat(threshold) + + @staticmethod + def _stringify_relativedelta(delta: relativedelta) -> str: + """Convert a relativedelta object to a duration string.""" + units = [("years", "y"), ("months", "m"), ("days", "d"), ("hours", "h"), ("minutes", "m"), ("seconds", "s")] + return "".join(f"{getattr(delta, unit)}{symbol}" for unit, symbol in units if getattr(delta, unit)) or "0s" + + def _log_threshold_stat(self, threshold: relativedelta) -> None: + """Adds the threshold to the bot stats in days.""" + utcnow = datetime.utcnow() + threshold_days = (utcnow + threshold - utcnow).total_seconds() / SECONDS_IN_DAY + self.bot.stats.gauge("defcon.threshold", threshold_days) async def _send_defcon_log(self, action: Action, actor: Member) -> None: """Send log message for DEFCON action.""" info = action.value log_msg: str = ( f"**Staffer:** {actor.mention} {actor} (`{actor.id}`)\n" - f"{info.template.format(days=self.days.days)}" + f"{info.template.format(threshold=humanize_delta(self.threshold))}" ) status_msg = f"DEFCON {action.name.lower()}" @@ -199,22 +219,22 @@ class Defcon(Cog): def _update_notifier(self) -> None: """Start or stop the notifier according to the DEFCON status.""" - if self.days.days != 0 and not self.defcon_notifier.is_running(): + if self.threshold != relativedelta(days=0) and not self.defcon_notifier.is_running(): log.info("DEFCON notifier started.") self.defcon_notifier.start() - elif self.days.days == 0 and self.defcon_notifier.is_running(): + elif self.threshold == relativedelta(days=0) and self.defcon_notifier.is_running(): log.info("DEFCON notifier stopped.") self.defcon_notifier.cancel() @tasks.loop(hours=1) async def defcon_notifier(self) -> None: """Routinely notify 
moderators that DEFCON is active.""" - await self.channel.send(f"Defcon is on and is set to {self.days.days} day{ngettext('', 's', self.days.days)}.") + await self.channel.send(f"Defcon is on and is set to {humanize_delta(self.threshold)}.") async def cog_check(self, ctx: Context) -> bool: """Only allow moderators in the defcon channel to run commands in this cog.""" - return has_any_role(*MODERATION_ROLES).predicate(ctx) and ctx.channel == self.channel + return (await has_any_role(*MODERATION_ROLES).predicate(ctx)) and ctx.channel == self.channel def cog_unload(self) -> None: """Cancel the notifer task when the cog unloads.""" diff --git a/bot/utils/time.py b/bot/utils/time.py index 47e49904b..5b197c350 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -1,5 +1,6 @@ import asyncio import datetime +import re from typing import Optional import dateutil.parser @@ -8,6 +9,16 @@ from dateutil.relativedelta import relativedelta RFC1123_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" INFRACTION_FORMAT = "%Y-%m-%d %H:%M" +_duration_parser = re.compile( + r"((?P\d+?) ?(years|year|Y|y) ?)?" + r"((?P\d+?) ?(months|month|m) ?)?" + r"((?P\d+?) ?(weeks|week|W|w) ?)?" + r"((?P\d+?) ?(days|day|D|d) ?)?" + r"((?P\d+?) ?(hours|hour|H|h) ?)?" + r"((?P\d+?) ?(minutes|minute|M) ?)?" + r"((?P\d+?) ?(seconds|second|S|s))?" +) + def _stringify_time_unit(value: int, unit: str) -> str: """ @@ -74,6 +85,31 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: return humanized +def parse_duration_string(duration: str) -> Optional[relativedelta]: + """ + Converts a `duration` string to a relativedelta object. 
+ + The function supports the following symbols for each unit of time: + - years: `Y`, `y`, `year`, `years` + - months: `m`, `month`, `months` + - weeks: `w`, `W`, `week`, `weeks` + - days: `d`, `D`, `day`, `days` + - hours: `H`, `h`, `hour`, `hours` + - minutes: `M`, `minute`, `minutes` + - seconds: `S`, `s`, `second`, `seconds` + The units need to be provided in descending order of magnitude. + If the string does not represent a durationdelta object, it will return None. + """ + match = _duration_parser.fullmatch(duration) + if not match: + return None + + duration_dict = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()} + delta = relativedelta(**duration_dict) + + return delta + + def time_since(past_datetime: datetime.datetime, precision: str = "seconds", max_units: int = 6) -> str: """ Takes a datetime and returns a human-readable string that describes how long ago that datetime was. -- cgit v1.2.3 From fdf12c6d2b2f3ab5ae335e2913a714cbeac2ff30 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 16:14:56 +0200 Subject: Add option to schedule threshold reset Added optional argument to defcon threshold to specify for how long it should be on. The notifier will now run only when there is no expiry date specified. 
--- bot/exts/moderation/defcon.py | 62 ++++++++++++++++++++++++++++++++----------- 1 file changed, 46 insertions(+), 16 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 82aaf5714..8c21a7327 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -4,19 +4,20 @@ import logging from collections import namedtuple from datetime import datetime from enum import Enum -from typing import Union +from typing import Optional, Union from async_rediscache import RedisCache from dateutil.relativedelta import relativedelta -from discord import Colour, Embed, Member +from discord import Colour, Embed, Member, User from discord.ext import tasks from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles -from bot.converters import DurationDelta +from bot.converters import DurationDelta, Expiry from bot.exts.moderation.modlog import ModLog from bot.utils.messages import format_user +from bot.utils.scheduling import Scheduler from bot.utils.time import humanize_delta, parse_duration_string log = logging.getLogger(__name__) @@ -60,6 +61,8 @@ class Defcon(Cog): self.threshold = relativedelta(days=0) self.expiry = None + self.scheduler = Scheduler(self.__class__.__name__) + self.bot.loop.create_task(self._sync_settings()) @property @@ -79,11 +82,15 @@ class Defcon(Cog): try: settings = await self.redis_cache.to_dict() self.threshold = parse_duration_string(settings["threshold"]) + self.expiry = datetime.fromisoformat(settings["expiry"]) if settings["expiry"] else None except Exception: log.exception("Unable to get DEFCON settings!") await self.channel.send(f"<@&{Roles.moderators}> **WARNING**: Unable to get DEFCON settings!") else: + if self.expiry: + self.scheduler.schedule_at(self.expiry, 0, self._remove_threshold()) + self._update_notifier() log.info(f"DEFCON synchronized: 
{humanize_delta(self.threshold)}") @@ -95,7 +102,7 @@ class Defcon(Cog): if self.threshold > relativedelta(days=0): now = datetime.utcnow() - if now - member.created_at < self.threshold: + if now - member.created_at < self.threshold: # TODO log.info(f"Rejecting user {member}: Account is too new") message_sent = False @@ -104,7 +111,7 @@ class Defcon(Cog): await member.send(REJECTION_MESSAGE.format(user=member.mention)) message_sent = True - except Exception: # TODO + except Exception: log.exception(f"Unable to send rejection message to user: {member}") await member.kick(reason="DEFCON active, user is too new") @@ -132,17 +139,22 @@ class Defcon(Cog): """Check the current status of DEFCON mode.""" embed = Embed( colour=Colour.blurple(), title="DEFCON Status", - description=f"**Threshold:** {humanize_delta(self.threshold)}" + description=f""" + **Threshold:** {humanize_delta(self.threshold)} + **Expires in:** {humanize_delta(relativedelta(self.expiry, datetime.utcnow())) if self.expiry else "-"} + """ ) await ctx.send(embed=embed) @defcon_group.command(aliases=('t',)) - async def threshold(self, ctx: Context, threshold: Union[DurationDelta, int]) -> None: + async def threshold( + self, ctx: Context, threshold: Union[DurationDelta, int], expiry: Optional[Expiry] = None + ) -> None: """Set how old an account must be to join the server.""" if isinstance(threshold, int): threshold = relativedelta(days=threshold) - await self._defcon_action(ctx, threshold=threshold) + await self._defcon_action(ctx.author, threshold=threshold, expiry=expiry) @defcon_group.command() async def shutdown(self, ctx: Context) -> None: @@ -172,28 +184,45 @@ class Defcon(Cog): await self.channel.edit(topic=new_topic) @redis_cache.atomic_transaction - async def _defcon_action(self, ctx: Context, threshold: relativedelta) -> None: + async def _defcon_action(self, author: User, threshold: relativedelta, expiry: Optional[Expiry] = None) -> None: """Providing a structured way to do a defcon action.""" 
self.threshold = threshold + if threshold == relativedelta(days=0): # If the threshold is 0, we don't need to schedule anything + expiry = None + self.expiry = expiry + + # Either way, we cancel the old task. + self.scheduler.cancel_all() + if self.expiry is not None: + self.scheduler.schedule_at(expiry, 0, self._remove_threshold()) await self.redis_cache.update( { 'threshold': Defcon._stringify_relativedelta(self.threshold), + 'expiry': expiry.isoformat() if expiry else 0 } ) self._update_notifier() action = Action.DURATION_UPDATE - await ctx.send( + expiry_message = "" + if expiry: + expiry_message = f"for the next {humanize_delta(relativedelta(expiry, datetime.utcnow()))}" + + await self.channel.send( f"{action.value.emoji} DEFCON threshold updated; accounts must be " - f"{humanize_delta(self.threshold)} old to join the server." + f"{humanize_delta(self.threshold)} old to join the server {expiry_message}." ) - await self._send_defcon_log(action, ctx.author) + await self._send_defcon_log(action, author) await self._update_channel_topic() self._log_threshold_stat(threshold) + async def _remove_threshold(self) -> None: + """Resets the threshold back to 0.""" + await self._defcon_action(self.bot.user, relativedelta(days=0)) + @staticmethod def _stringify_relativedelta(delta: relativedelta) -> str: """Convert a relativedelta object to a duration string.""" @@ -206,7 +235,7 @@ class Defcon(Cog): threshold_days = (utcnow + threshold - utcnow).total_seconds() / SECONDS_IN_DAY self.bot.stats.gauge("defcon.threshold", threshold_days) - async def _send_defcon_log(self, action: Action, actor: Member) -> None: + async def _send_defcon_log(self, action: Action, actor: User) -> None: """Send log message for DEFCON action.""" info = action.value log_msg: str = ( @@ -219,11 +248,11 @@ class Defcon(Cog): def _update_notifier(self) -> None: """Start or stop the notifier according to the DEFCON status.""" - if self.threshold != relativedelta(days=0) and not 
self.defcon_notifier.is_running(): + if self.threshold != relativedelta(days=0) and self.expiry is None and not self.defcon_notifier.is_running(): log.info("DEFCON notifier started.") self.defcon_notifier.start() - elif self.threshold == relativedelta(days=0) and self.defcon_notifier.is_running(): + elif (self.threshold == relativedelta(days=0) or self.expiry is not None) and self.defcon_notifier.is_running(): log.info("DEFCON notifier stopped.") self.defcon_notifier.cancel() @@ -237,9 +266,10 @@ class Defcon(Cog): return (await has_any_role(*MODERATION_ROLES).predicate(ctx)) and ctx.channel == self.channel def cog_unload(self) -> None: - """Cancel the notifer task when the cog unloads.""" + """Cancel the notifer and threshold removal tasks when the cog unloads.""" log.trace("Cog unload: canceling defcon notifier task.") self.defcon_notifier.cancel() + self.scheduler.cancel_all() def setup(bot: Bot) -> None: -- cgit v1.2.3 From 2e4a069ac185d1d978070327e76faba4eefbd255 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 16:42:27 +0200 Subject: Fixed on_message --- bot/exts/moderation/defcon.py | 9 ++++----- bot/exts/moderation/slowmode.py | 4 +--- bot/utils/time.py | 6 ++++++ 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 8c21a7327..28a1a425f 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -18,7 +18,7 @@ from bot.converters import DurationDelta, Expiry from bot.exts.moderation.modlog import ModLog from bot.utils.messages import format_user from bot.utils.scheduling import Scheduler -from bot.utils.time import humanize_delta, parse_duration_string +from bot.utils.time import humanize_delta, parse_duration_string, relativedelta_to_timedelta log = logging.getLogger(__name__) @@ -99,10 +99,10 @@ class Defcon(Cog): @Cog.listener() async def on_member_join(self, member: Member) -> None: """Check newly joining users to see if they meet the account 
age threshold.""" - if self.threshold > relativedelta(days=0): + if self.threshold != relativedelta(days=0): now = datetime.utcnow() - if now - member.created_at < self.threshold: # TODO + if now - member.created_at < relativedelta_to_timedelta(self.threshold): log.info(f"Rejecting user {member}: Account is too new") message_sent = False @@ -231,8 +231,7 @@ class Defcon(Cog): def _log_threshold_stat(self, threshold: relativedelta) -> None: """Adds the threshold to the bot stats in days.""" - utcnow = datetime.utcnow() - threshold_days = (utcnow + threshold - utcnow).total_seconds() / SECONDS_IN_DAY + threshold_days = relativedelta_to_timedelta(threshold).total_seconds() / SECONDS_IN_DAY self.bot.stats.gauge("defcon.threshold", threshold_days) async def _send_defcon_log(self, action: Action, actor: User) -> None: diff --git a/bot/exts/moderation/slowmode.py b/bot/exts/moderation/slowmode.py index c449752e1..d8baff76a 100644 --- a/bot/exts/moderation/slowmode.py +++ b/bot/exts/moderation/slowmode.py @@ -1,5 +1,4 @@ import logging -from datetime import datetime from typing import Optional from dateutil.relativedelta import relativedelta @@ -54,8 +53,7 @@ class Slowmode(Cog): # Convert `dateutil.relativedelta.relativedelta` to `datetime.timedelta` # Must do this to get the delta in a particular unit of time - utcnow = datetime.utcnow() - slowmode_delay = (utcnow + delay - utcnow).total_seconds() + slowmode_delay = time.relativedelta_to_timedelta(delay).total_seconds() humanized_delay = time.humanize_delta(delay) diff --git a/bot/utils/time.py b/bot/utils/time.py index 5b197c350..a7b441327 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -110,6 +110,12 @@ def parse_duration_string(duration: str) -> Optional[relativedelta]: return delta +def relativedelta_to_timedelta(delta: relativedelta) -> datetime.timedelta: + """Converts a relativedelta object to a timedelta object.""" + utcnow = datetime.datetime.utcnow() + return utcnow + delta - utcnow + + def 
time_since(past_datetime: datetime.datetime, precision: str = "seconds", max_units: int = 6) -> str: """ Takes a datetime and returns a human-readable string that describes how long ago that datetime was. -- cgit v1.2.3 From 62bd358d9fe390ba4ac25e122e261a44276ad9e9 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 16:51:52 +0200 Subject: Status command displays verification level --- bot/exts/moderation/defcon.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 28a1a425f..17b521b00 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -142,6 +142,7 @@ class Defcon(Cog): description=f""" **Threshold:** {humanize_delta(self.threshold)} **Expires in:** {humanize_delta(relativedelta(self.expiry, datetime.utcnow())) if self.expiry else "-"} + **Verification level:** {ctx.guild.verification_level.name} """ ) -- cgit v1.2.3 From 94f7a701034d53e82d04f2a08e5927f874c74f49 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 13 Feb 2021 17:34:57 +0200 Subject: Prevent channel description edit from locking commands Because some parts are defined as atomic transaction, we can't use them with channel description edits which are heavily rate limited. Description edits are now run in a separate task. 
--- bot/exts/moderation/defcon.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 17b521b00..d1b99cb35 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -1,5 +1,6 @@ from __future__ import annotations +import asyncio import logging from collections import namedtuple from datetime import datetime @@ -182,7 +183,7 @@ class Defcon(Cog): new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold)})" self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) - await self.channel.edit(topic=new_topic) + asyncio.create_task(self.channel.edit(topic=new_topic)) @redis_cache.atomic_transaction async def _defcon_action(self, author: User, threshold: relativedelta, expiry: Optional[Expiry] = None) -> None: -- cgit v1.2.3 From abc3f1a30b60981f90acf2065507179010b39713 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sun, 14 Feb 2021 04:34:02 +0200 Subject: _update_channel_topic no longer needs to be awaited It's important to note that it's appropriate for the sync and action methods to have a lock between them, because if an action is made before syncing is complete it gets screwed and starts throwing exceptions for every command. 
--- bot/exts/moderation/defcon.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index d1b99cb35..3cc8960dd 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -95,7 +95,7 @@ class Defcon(Cog): self._update_notifier() log.info(f"DEFCON synchronized: {humanize_delta(self.threshold)}") - await self._update_channel_topic() + self._update_channel_topic() @Cog.listener() async def on_member_join(self, member: Member) -> None: @@ -178,7 +178,7 @@ class Defcon(Cog): await role.edit(reason="DEFCON unshutdown", permissions=permissions) await ctx.send(f"{Action.SERVER_OPEN.value.emoji} Server reopened.") - async def _update_channel_topic(self) -> None: + def _update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold)})" @@ -217,7 +217,7 @@ class Defcon(Cog): f"{humanize_delta(self.threshold)} old to join the server {expiry_message}." 
) await self._send_defcon_log(action, author) - await self._update_channel_topic() + self._update_channel_topic() self._log_threshold_stat(threshold) -- cgit v1.2.3 From 5dabc88c31c212182c155ebe873fad7b04879682 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sun, 14 Feb 2021 18:33:05 +0200 Subject: Removed cog check, shutdown restricted to admins --- bot/exts/moderation/defcon.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 3cc8960dd..daacf95b7 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -131,11 +131,13 @@ class Defcon(Cog): ) @group(name='defcon', aliases=('dc',), invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) async def defcon_group(self, ctx: Context) -> None: """Check the DEFCON status or run a subcommand.""" await ctx.send_help(ctx.command) @defcon_group.command(aliases=('s',)) + @has_any_role(*MODERATION_ROLES) async def status(self, ctx: Context) -> None: """Check the current status of DEFCON mode.""" embed = Embed( @@ -150,6 +152,7 @@ class Defcon(Cog): await ctx.send(embed=embed) @defcon_group.command(aliases=('t',)) + @has_any_role(*MODERATION_ROLES) async def threshold( self, ctx: Context, threshold: Union[DurationDelta, int], expiry: Optional[Expiry] = None ) -> None: @@ -159,6 +162,7 @@ class Defcon(Cog): await self._defcon_action(ctx.author, threshold=threshold, expiry=expiry) @defcon_group.command() + @has_any_role(Roles.admins) async def shutdown(self, ctx: Context) -> None: """Shut down the server by setting send permissions of everyone to False.""" role = ctx.guild.default_role @@ -169,6 +173,7 @@ class Defcon(Cog): await ctx.send(f"{Action.SERVER_SHUTDOWN.value.emoji} Server shut down.") @defcon_group.command() + @has_any_role(Roles.admins) async def unshutdown(self, ctx: Context) -> None: """Open up the server again by setting send permissions of everyone to None.""" role = ctx.guild.default_role @@ 
-262,10 +267,6 @@ class Defcon(Cog): """Routinely notify moderators that DEFCON is active.""" await self.channel.send(f"Defcon is on and is set to {humanize_delta(self.threshold)}.") - async def cog_check(self, ctx: Context) -> bool: - """Only allow moderators in the defcon channel to run commands in this cog.""" - return (await has_any_role(*MODERATION_ROLES).predicate(ctx)) and ctx.channel == self.channel - def cog_unload(self) -> None: """Cancel the notifer and threshold removal tasks when the cog unloads.""" log.trace("Cog unload: canceling defcon notifier task.") -- cgit v1.2.3 From 64598b37145cbd2a2ee25008ff6217ee7fe6de03 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sun, 14 Feb 2021 18:37:05 +0200 Subject: Renamed _defcon_action to _update_threshold and updated docstring --- bot/exts/moderation/defcon.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index daacf95b7..cdc5ff1b0 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -159,7 +159,7 @@ class Defcon(Cog): """Set how old an account must be to join the server.""" if isinstance(threshold, int): threshold = relativedelta(days=threshold) - await self._defcon_action(ctx.author, threshold=threshold, expiry=expiry) + await self._update_threshold(ctx.author, threshold=threshold, expiry=expiry) @defcon_group.command() @has_any_role(Roles.admins) @@ -191,8 +191,8 @@ class Defcon(Cog): asyncio.create_task(self.channel.edit(topic=new_topic)) @redis_cache.atomic_transaction - async def _defcon_action(self, author: User, threshold: relativedelta, expiry: Optional[Expiry] = None) -> None: - """Providing a structured way to do a defcon action.""" + async def _update_threshold(self, author: User, threshold: relativedelta, expiry: Optional[Expiry] = None) -> None: + """Update the new threshold in the cog, cache, defcon channel, and logs, and additionally schedule expiry.""" self.threshold = threshold if 
threshold == relativedelta(days=0): # If the threshold is 0, we don't need to schedule anything expiry = None @@ -228,7 +228,7 @@ class Defcon(Cog): async def _remove_threshold(self) -> None: """Resets the threshold back to 0.""" - await self._defcon_action(self.bot.user, relativedelta(days=0)) + await self._update_threshold(self.bot.user, relativedelta(days=0)) @staticmethod def _stringify_relativedelta(delta: relativedelta) -> str: -- cgit v1.2.3 From cd0e443656f0aca93691b19f3b80a6e2b7c0e228 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Wed, 17 Feb 2021 15:09:21 +0200 Subject: Add missing None reason returns of filters --- bot/exts/filters/filtering.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 6f1374cf4..0f403ca92 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -432,7 +432,7 @@ class Filtering(Cog): # Make sure it's not a URL if URL_RE.search(text): - return False + return False, None watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False) for pattern in watchlist_patterns: @@ -445,7 +445,7 @@ class Filtering(Cog): async def _has_urls(self, text: str) -> Tuple[bool, Optional[str]]: """Returns True if the text contains one of the blacklisted URLs from the config file.""" if not URL_RE.search(text): - return False + return False, None text = text.lower() domain_blacklist = self._get_filterlist_items("domain_name", allowed=False) @@ -494,7 +494,7 @@ class Filtering(Cog): # Lack of a "guild" key in the JSON response indicates either an group DM invite, an # expired invite, or an invalid invite. 
The API does not currently differentiate # between invalid and expired invites - return True + return True, None guild_id = guild.get("id") guild_invite_whitelist = self._get_filterlist_items("guild_invite", allowed=True) -- cgit v1.2.3 From 480dd04bda402e86337bd9a3fb5b236f661c6055 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Wed, 17 Feb 2021 15:10:47 +0200 Subject: Rename filter trigger reason from entry command to reason --- bot/exts/filters/filtering.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 0f403ca92..c0fa431f1 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -344,7 +344,7 @@ class Filtering(Cog): ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True) eval_msg = "using !eval " if is_eval else "" - footer = f"Entry comment: {reason}" if reason else None + footer = f"Reason: {reason}" if reason else None message = ( f"The {filter_name} {_filter['type']} was triggered by {format_user(msg.author)} " f"{channel_str} {eval_msg}with [the following message]({msg.jump_url}):\n\n" @@ -388,7 +388,7 @@ class Filtering(Cog): if name == "filter_invites" and match is not True: additional_embeds = [] for _, data in match.items(): - reason = f"\n**Entry comment:**\n{data['reason']}" if data.get('reason') else "" + reason = f"\n**Reason:**\n{data['reason']}" if data.get('reason') else "" embed = discord.Embed(description=( f"**Members:**\n{data['members']}\n" f"**Active:**\n{data['active']}" -- cgit v1.2.3 From 90bba1ccabeeb34250f86f5bcca70d335dd0ac27 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Wed, 17 Feb 2021 15:20:59 +0200 Subject: Add comments about returning blacklist entry reasons to filters --- bot/exts/filters/filtering.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/bot/exts/filters/filtering.py 
b/bot/exts/filters/filtering.py index c0fa431f1..616c40605 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -426,6 +426,7 @@ class Filtering(Cog): `word_watchlist`'s patterns are placed between word boundaries while `token_watchlist` is matched as-is. Spoilers are expanded, if any, and URLs are ignored. + Second return value is a reason written to database about blacklist entry (can be None). """ if SPOILER_RE.search(text): text = self._expand_spoilers(text) @@ -443,7 +444,11 @@ class Filtering(Cog): return False, None async def _has_urls(self, text: str) -> Tuple[bool, Optional[str]]: - """Returns True if the text contains one of the blacklisted URLs from the config file.""" + """ + Returns True if the text contains one of the blacklisted URLs from the config file. + + Second return value is a reason of URL blacklisting (can be None). + """ if not URL_RE.search(text): return False, None @@ -474,6 +479,8 @@ class Filtering(Cog): If none are detected, False is returned. Attempts to catch some of common ways to try to cheat the system. + + Return None as second value for compability with other filters. """ # Remove backslashes to prevent escape character aroundfuckery like # discord\.gg/gdudes-pony-farm @@ -535,7 +542,11 @@ class Filtering(Cog): @staticmethod async def _has_rich_embed(msg: Message) -> Tuple[Union[bool, List[discord.Embed]], None]: - """Determines if `msg` contains any rich embeds not auto-generated from a URL.""" + """ + Determines if `msg` contains any rich embeds not auto-generated from a URL. + + Return None as second value for compability with other filters. 
+ """ if msg.embeds: for embed in msg.embeds: if embed.type == "rich": @@ -554,7 +565,11 @@ class Filtering(Cog): @staticmethod async def _has_everyone_ping(text: str) -> Tuple[bool, None]: - """Determines if `msg` contains an @everyone or @here ping outside of a codeblock.""" + """ + Determines if `msg` contains an @everyone or @here ping outside of a codeblock. + + Return None as second value for compability with other filters. + """ # First pass to avoid running re.sub on every message if not EVERYONE_PING_RE.search(text): return False, None -- cgit v1.2.3 From e4c77617d38e18fd00e6a58a4ce81d85181c1d90 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 17 Feb 2021 18:30:27 +0200 Subject: Changed server command to work with new defcon cog --- bot/exts/info/information.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 4499e4c25..577ec13f0 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -17,7 +17,7 @@ from bot.decorators import in_whitelist from bot.pagination import LinePaginator from bot.utils.channel import is_mod_channel, is_staff_channel from bot.utils.checks import cooldown_with_role_bypass, has_no_roles_check, in_whitelist_check -from bot.utils.time import time_since +from bot.utils.time import humanize_delta, time_since log = logging.getLogger(__name__) @@ -52,7 +52,7 @@ class Information(Cog): ) return {role.name.title(): len(role.members) for role in roles} - def get_extended_server_info(self) -> str: + def get_extended_server_info(self, ctx: Context) -> str: """Return additional server info only visible in moderation channels.""" talentpool_info = "" if cog := self.bot.get_cog("Talentpool"): @@ -64,9 +64,9 @@ class Information(Cog): defcon_info = "" if cog := self.bot.get_cog("Defcon"): - defcon_status = "Enabled" if cog.enabled else "Disabled" - defcon_days = cog.days.days if cog.enabled else "-" - defcon_info = f"Defcon 
status: {defcon_status}\nDefcon days: {defcon_days}\n" + defcon_info = f"Defcon threshold: {humanize_delta(cog.threshold)}\n" + + verification = f"Verification level: {ctx.guild.verification_level.name}\n" python_general = self.bot.get_channel(constants.Channels.python_general) @@ -74,6 +74,7 @@ class Information(Cog): {talentpool_info}\ {bb_info}\ {defcon_info}\ + {verification}\ {python_general.mention} cooldown: {python_general.slowmode_delay}s """) @@ -198,7 +199,7 @@ class Information(Cog): # Additional info if ran in moderation channels if is_mod_channel(ctx.channel): - embed.add_field(name="Moderation:", value=self.get_extended_server_info()) + embed.add_field(name="Moderation:", value=self.get_extended_server_info(ctx)) await ctx.send(embed=embed) -- cgit v1.2.3 From cef88008c11f3b6ee12eced70a0265639abe20bd Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 17 Feb 2021 19:51:07 +0200 Subject: Gave more meaningful name and description to the cache --- bot/exts/moderation/defcon.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index cdc5ff1b0..44fb8dc8f 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -54,7 +54,10 @@ class Action(Enum): class Defcon(Cog): """Time-sensitive server defense mechanisms.""" - redis_cache = RedisCache() + # RedisCache[str, str] + # The cache's keys are "threshold" and "expiry". + # The caches' values are strings formatted as valid input to the DurationDelta converter. 
+ defcon_settings = RedisCache() def __init__(self, bot: Bot): self.bot = bot @@ -71,7 +74,7 @@ class Defcon(Cog): """Get currently loaded ModLog cog instance.""" return self.bot.get_cog("ModLog") - @redis_cache.atomic_transaction + @defcon_settings.atomic_transaction async def _sync_settings(self) -> None: """On cog load, try to synchronize DEFCON settings to the API.""" log.trace("Waiting for the guild to become available before syncing.") @@ -81,7 +84,7 @@ class Defcon(Cog): log.trace("Syncing settings.") try: - settings = await self.redis_cache.to_dict() + settings = await self.defcon_settings.to_dict() self.threshold = parse_duration_string(settings["threshold"]) self.expiry = datetime.fromisoformat(settings["expiry"]) if settings["expiry"] else None except Exception: @@ -190,7 +193,7 @@ class Defcon(Cog): self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) asyncio.create_task(self.channel.edit(topic=new_topic)) - @redis_cache.atomic_transaction + @defcon_settings.atomic_transaction async def _update_threshold(self, author: User, threshold: relativedelta, expiry: Optional[Expiry] = None) -> None: """Update the new threshold in the cog, cache, defcon channel, and logs, and additionally schedule expiry.""" self.threshold = threshold @@ -203,7 +206,7 @@ class Defcon(Cog): if self.expiry is not None: self.scheduler.schedule_at(expiry, 0, self._remove_threshold()) - await self.redis_cache.update( + await self.defcon_settings.update( { 'threshold': Defcon._stringify_relativedelta(self.threshold), 'expiry': expiry.isoformat() if expiry else 0 -- cgit v1.2.3 From d36ea6430faefefc0b60c9f7ac87bc89aaf2b5b5 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 17 Feb 2021 20:03:30 +0200 Subject: Error loading settings will also ping devops role --- bot/constants.py | 1 + bot/exts/moderation/defcon.py | 4 +++- config-default.yml | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/bot/constants.py b/bot/constants.py index cbab751d0..65e8230c5 100644 
--- a/bot/constants.py +++ b/bot/constants.py @@ -483,6 +483,7 @@ class Roles(metaclass=YAMLGetter): admins: int core_developers: int + devops: int helpers: int moderators: int owners: int diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 44fb8dc8f..66b551425 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -89,7 +89,9 @@ class Defcon(Cog): self.expiry = datetime.fromisoformat(settings["expiry"]) if settings["expiry"] else None except Exception: log.exception("Unable to get DEFCON settings!") - await self.channel.send(f"<@&{Roles.moderators}> **WARNING**: Unable to get DEFCON settings!") + await self.channel.send( + f"<@&{Roles.moderators}> <@&{Roles.devops}> **WARNING**: Unable to get DEFCON settings!" + ) else: if self.expiry: diff --git a/config-default.yml b/config-default.yml index a37743c15..59da23169 100644 --- a/config-default.yml +++ b/config-default.yml @@ -257,6 +257,7 @@ guild: # Staff admins: &ADMINS_ROLE 267628507062992896 core_developers: 587606783669829632 + devops: 409416496733880320 helpers: &HELPERS_ROLE 267630620367257601 moderators: &MODS_ROLE 267629731250176001 owners: &OWNERS_ROLE 267627879762755584 -- cgit v1.2.3 From be08ea954bce3e7e3f407cf72e78fe7e1aa9096e Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 17 Feb 2021 20:44:39 +0200 Subject: Threshold has false-y value when set to 0 --- bot/exts/moderation/defcon.py | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 66b551425..49f5a4ddd 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio import logging from collections import namedtuple @@ -56,7 +54,7 @@ class Defcon(Cog): # RedisCache[str, str] # The cache's keys are "threshold" and "expiry". 
- # The caches' values are strings formatted as valid input to the DurationDelta converter. + # The caches' values are strings formatted as valid input to the DurationDelta converter, or empty when off. defcon_settings = RedisCache() def __init__(self, bot: Bot): @@ -85,7 +83,7 @@ class Defcon(Cog): try: settings = await self.defcon_settings.to_dict() - self.threshold = parse_duration_string(settings["threshold"]) + self.threshold = parse_duration_string(settings["threshold"]) if settings["threshold"] else None self.expiry = datetime.fromisoformat(settings["expiry"]) if settings["expiry"] else None except Exception: log.exception("Unable to get DEFCON settings!") @@ -98,14 +96,14 @@ class Defcon(Cog): self.scheduler.schedule_at(self.expiry, 0, self._remove_threshold()) self._update_notifier() - log.info(f"DEFCON synchronized: {humanize_delta(self.threshold)}") + log.info(f"DEFCON synchronized: {humanize_delta(self.threshold) if self.threshold else '-'}") self._update_channel_topic() @Cog.listener() async def on_member_join(self, member: Member) -> None: """Check newly joining users to see if they meet the account age threshold.""" - if self.threshold != relativedelta(days=0): + if self.threshold: now = datetime.utcnow() if now - member.created_at < relativedelta_to_timedelta(self.threshold): @@ -148,7 +146,7 @@ class Defcon(Cog): embed = Embed( colour=Colour.blurple(), title="DEFCON Status", description=f""" - **Threshold:** {humanize_delta(self.threshold)} + **Threshold:** {humanize_delta(self.threshold) if self.threshold else "-"} **Expires in:** {humanize_delta(relativedelta(self.expiry, datetime.utcnow())) if self.expiry else "-"} **Verification level:** {ctx.guild.verification_level.name} """ @@ -190,7 +188,7 @@ class Defcon(Cog): def _update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" - new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold)})" + new_topic = 
f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold) if self.threshold else '-'})" self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) asyncio.create_task(self.channel.edit(topic=new_topic)) @@ -210,7 +208,7 @@ class Defcon(Cog): await self.defcon_settings.update( { - 'threshold': Defcon._stringify_relativedelta(self.threshold), + 'threshold': Defcon._stringify_relativedelta(self.threshold) if self.threshold else "", 'expiry': expiry.isoformat() if expiry else 0 } ) @@ -220,11 +218,18 @@ class Defcon(Cog): expiry_message = "" if expiry: - expiry_message = f"for the next {humanize_delta(relativedelta(expiry, datetime.utcnow()))}" + expiry_message = f" for the next {humanize_delta(relativedelta(expiry, datetime.utcnow()), max_units=2)}" + + if self.threshold: + channel_message = ( + f"updated; accounts must be {humanize_delta(self.threshold)} " + f"old to join the server{expiry_message}" + ) + else: + channel_message = "removed" await self.channel.send( - f"{action.value.emoji} DEFCON threshold updated; accounts must be " - f"{humanize_delta(self.threshold)} old to join the server {expiry_message}." + f"{action.value.emoji} DEFCON threshold {channel_message}." 
) await self._send_defcon_log(action, author) self._update_channel_topic() @@ -251,7 +256,7 @@ class Defcon(Cog): info = action.value log_msg: str = ( f"**Staffer:** {actor.mention} {actor} (`{actor.id}`)\n" - f"{info.template.format(threshold=humanize_delta(self.threshold))}" + f"{info.template.format(threshold=(humanize_delta(self.threshold) if self.threshold else '-'))}" ) status_msg = f"DEFCON {action.name.lower()}" @@ -259,11 +264,11 @@ class Defcon(Cog): def _update_notifier(self) -> None: """Start or stop the notifier according to the DEFCON status.""" - if self.threshold != relativedelta(days=0) and self.expiry is None and not self.defcon_notifier.is_running(): + if self.threshold and self.expiry is None and not self.defcon_notifier.is_running(): log.info("DEFCON notifier started.") self.defcon_notifier.start() - elif (self.threshold == relativedelta(days=0) or self.expiry is not None) and self.defcon_notifier.is_running(): + elif (not self.threshold or self.expiry is not None) and self.defcon_notifier.is_running(): log.info("DEFCON notifier stopped.") self.defcon_notifier.cancel() -- cgit v1.2.3 From b7712cb0c3afac01cc67547ddbe0f17057e07585 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 19 Feb 2021 10:49:59 +0200 Subject: Error to load settings will send the traceback to the channel --- bot/exts/moderation/defcon.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 49f5a4ddd..3ea6b971a 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -1,5 +1,6 @@ import asyncio import logging +import traceback from collections import namedtuple from datetime import datetime from enum import Enum @@ -89,6 +90,7 @@ class Defcon(Cog): log.exception("Unable to get DEFCON settings!") await self.channel.send( f"<@&{Roles.moderators}> <@&{Roles.devops}> **WARNING**: Unable to get DEFCON settings!" 
+ f"\n\n```{traceback.format_exc()}```" ) else: -- cgit v1.2.3 From 0370f3677fa74467063f798e32e3728bb1183947 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 19 Feb 2021 10:51:23 +0200 Subject: Retain 'd' alias for threshold command --- bot/exts/moderation/defcon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 3ea6b971a..86dece518 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -156,7 +156,7 @@ class Defcon(Cog): await ctx.send(embed=embed) - @defcon_group.command(aliases=('t',)) + @defcon_group.command(aliases=('t', 'd')) @has_any_role(*MODERATION_ROLES) async def threshold( self, ctx: Context, threshold: Union[DurationDelta, int], expiry: Optional[Expiry] = None -- cgit v1.2.3 From 0f4365e2430d40f17ab9a545d3e8614a4b3a9669 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 20 Feb 2021 11:54:52 +0200 Subject: Remove attachments check in duplicates filter --- bot/rules/duplicates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py index 23aefd3dc..8e4fbc12d 100644 --- a/bot/rules/duplicates.py +++ b/bot/rules/duplicates.py @@ -13,7 +13,7 @@ async def apply( if ( msg.author == last_message.author and msg.content == last_message.content - and (msg.content and not msg.attachments) + and msg.content ) ) -- cgit v1.2.3 From 7f980be37a572f1998160ce6a2221504e414d285 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Sat, 20 Feb 2021 11:55:09 +0200 Subject: Update CODEOWNERS --- .github/CODEOWNERS | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ad813d893..7217cb443 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -7,11 +7,15 @@ bot/exts/utils/extensions.py @MarkKoz bot/exts/utils/snekbox.py @MarkKoz @Akarys42 bot/exts/help_channels/** @MarkKoz 
@Akarys42 bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 -bot/exts/info/** @Akarys42 @mbaruh @Den4200 +bot/exts/info/** @Akarys42 @Den4200 +bot/exts/info/information.py @mbaruh bot/exts/filters/** @mbaruh bot/exts/fun/** @ks129 bot/exts/utils/** @ks129 +# Rules +bot/rules/** @mbaruh + # Utils bot/utils/extensions.py @MarkKoz bot/utils/function.py @MarkKoz -- cgit v1.2.3 From e3b980e53c13fd5dcaf51408f97c99b629c1a6ec Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 20 Feb 2021 11:55:26 +0200 Subject: Set max attachment from 3 -> 6 --- config-default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config-default.yml b/config-default.yml index d323a946d..e9dce7845 100644 --- a/config-default.yml +++ b/config-default.yml @@ -367,7 +367,7 @@ anti_spam: rules: attachments: interval: 10 - max: 3 + max: 6 burst: interval: 10 -- cgit v1.2.3 From 2268d461f254b4087013ce8ed1125c5287dda099 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 20 Feb 2021 15:38:58 +0200 Subject: Move reason for guild filtering to footer --- bot/exts/filters/filtering.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 616c40605..2e8552f59 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -388,15 +388,14 @@ class Filtering(Cog): if name == "filter_invites" and match is not True: additional_embeds = [] for _, data in match.items(): - reason = f"\n**Reason:**\n{data['reason']}" if data.get('reason') else "" + reason = f"Reason: {data['reason']} | " if data.get('reason') else "" embed = discord.Embed(description=( f"**Members:**\n{data['members']}\n" f"**Active:**\n{data['active']}" - f"{reason}" )) embed.set_author(name=data["name"]) embed.set_thumbnail(url=data["icon"]) - embed.set_footer(text=f"Guild ID: {data['id']}") + embed.set_footer(text=f"{reason}Guild ID: 
{data['id']}") additional_embeds.append(embed) elif name == "watch_rich_embeds": -- cgit v1.2.3 From 0dd2e75be2368a4197e9370cf982dc8be8fa862b Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sat, 20 Feb 2021 17:01:19 +0100 Subject: Add bot and verified bot badges to the user embed. --- bot/constants.py | 2 ++ bot/exts/info/information.py | 3 +++ config-default.yml | 2 ++ 3 files changed, 7 insertions(+) diff --git a/bot/constants.py b/bot/constants.py index 8a93ff9cf..91d425b1d 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -279,6 +279,8 @@ class Emojis(metaclass=YAMLGetter): badge_partner: str badge_staff: str badge_verified_bot_developer: str + badge_verified_bot: str + bot: str defcon_disabled: str # noqa: E704 defcon_enabled: str # noqa: E704 diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 4499e4c25..256be2161 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -228,6 +228,9 @@ class Information(Cog): if on_server and user.nick: name = f"{user.nick} ({name})" + if user.bot: + name += f" {constants.Emojis.bot}" + badges = [] for badge, is_set in user.public_flags: diff --git a/config-default.yml b/config-default.yml index 25bbcc3c5..822b37daf 100644 --- a/config-default.yml +++ b/config-default.yml @@ -46,6 +46,8 @@ style: badge_partner: "<:partner:748666453242413136>" badge_staff: "<:discord_staff:743882896498098226>" badge_verified_bot_developer: "<:verified_bot_dev:743882897299210310>" + badge_verified_bot: "<:verified_bot:811645219220750347>" + bot: "<:bot:812712599464443914>" defcon_disabled: "<:defcondisabled:470326273952972810>" defcon_enabled: "<:defconenabled:470326274213150730>" -- cgit v1.2.3 From 93c3327414dabd12236e47210be2be1151b71719 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sun, 21 Feb 2021 13:50:37 +0100 Subject: Show the last three characters of censored tokens --- bot/exts/filters/token_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/bot/exts/filters/token_remover.py b/bot/exts/filters/token_remover.py index bd6a1f97a..33b39cc2d 100644 --- a/bot/exts/filters/token_remover.py +++ b/bot/exts/filters/token_remover.py @@ -147,7 +147,7 @@ class TokenRemover(Cog): channel=msg.channel.mention, user_id=token.user_id, timestamp=token.timestamp, - hmac='x' * len(token.hmac), + hmac='x' * (len(token.hmac) - 3) + token.hmac[-3:], ) @classmethod -- cgit v1.2.3 From 04e233685e163d1e513a21acd236c2385536b0b7 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sun, 21 Feb 2021 13:51:57 +0100 Subject: Ping the mods if a token present in the server is found no matter the kind --- bot/exts/filters/token_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/filters/token_remover.py b/bot/exts/filters/token_remover.py index 33b39cc2d..93f1f3c33 100644 --- a/bot/exts/filters/token_remover.py +++ b/bot/exts/filters/token_remover.py @@ -135,7 +135,7 @@ class TokenRemover(Cog): user_id=user_id, user_name=str(user), kind="BOT" if user.bot else "USER", - ), not user.bot + ), True else: return UNKNOWN_USER_LOG_MESSAGE.format(user_id=user_id), False -- cgit v1.2.3 From 27e60e94bd1fe6784c2b7674433bb175255fa217 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sun, 21 Feb 2021 14:06:54 +0100 Subject: Update token remover unittests --- tests/bot/exts/filters/test_token_remover.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/bot/exts/filters/test_token_remover.py b/tests/bot/exts/filters/test_token_remover.py index f99cc3370..51feae9cb 100644 --- a/tests/bot/exts/filters/test_token_remover.py +++ b/tests/bot/exts/filters/test_token_remover.py @@ -291,7 +291,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): channel=self.msg.channel.mention, user_id=token.user_id, timestamp=token.timestamp, - hmac="x" * len(token.hmac), + hmac="xxxxxxxxxxxxxxxxxxxxxxxxjf4", ) @autospec("bot.exts.filters.token_remover", "UNKNOWN_USER_LOG_MESSAGE") @@ -318,7 +318,7 
@@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): return_value = TokenRemover.format_userid_log_message(msg, token) - self.assertEqual(return_value, (known_user_log_message.format.return_value, False)) + self.assertEqual(return_value, (known_user_log_message.format.return_value, True)) known_user_log_message.format.assert_called_once_with( user_id=472265943062413332, -- cgit v1.2.3 From 584ed52c7107c7d3e3b838ee1e8df3a22ae95e35 Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sun, 21 Feb 2021 23:07:38 +0000 Subject: Update max available channels to 3 Partially resolves #1427 --- config-default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config-default.yml b/config-default.yml index beaf89f2c..8e9a29a51 100644 --- a/config-default.yml +++ b/config-default.yml @@ -470,7 +470,7 @@ help_channels: deleted_idle_minutes: 5 # Maximum number of channels to put in the available category - max_available: 2 + max_available: 3 # Maximum number of channels across all 3 categories # Note Discord has a hard limit of 50 channels per category, so this shouldn't be > 50 -- cgit v1.2.3 From 1daf01ef9a3853252d4cadab5fc6abce14df3557 Mon Sep 17 00:00:00 2001 From: wookie184 Date: Mon, 22 Feb 2021 11:35:12 +0000 Subject: Rewrite inline codeblock tag --- bot/resources/tags/inline.md | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/bot/resources/tags/inline.md b/bot/resources/tags/inline.md index a6a7c35d6..4ece74ef7 100644 --- a/bot/resources/tags/inline.md +++ b/bot/resources/tags/inline.md @@ -1,16 +1,7 @@ **Inline codeblocks** -In addition to multi-line codeblocks, discord has support for inline codeblocks as well. These are small codeblocks that are usually a single line, that can fit between non-codeblocks on the same line. +Inline codeblocks look `like this`. To create them you surround text with single backticks, so \`hello\` would become `hello`. 
-The following is an example of how it's done: +Note that backticks are not quotes, see [this](https://superuser.com/questions/254076/how-do-i-type-the-tick-and-backtick-characters-on-windows/254077#254077) if you are struggling to find the backtick key. -The \`\_\_init\_\_\` method customizes the newly created instance. - -And results in the following: - -The `__init__` method customizes the newly created instance. - -**Note:** -• These are **backticks** not quotes -• Avoid using them for multiple lines -• Useful for negating formatting you don't want +For how to make multiline codeblocks see the `!codeblock` tag. -- cgit v1.2.3 From b116688be7d8b3d83c88a78969e2118e0504fadc Mon Sep 17 00:00:00 2001 From: wookie184 Date: Mon, 22 Feb 2021 11:38:45 +0000 Subject: Add pep 8 song to pep 8 tag --- bot/resources/tags/pep8.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bot/resources/tags/pep8.md b/bot/resources/tags/pep8.md index cab4c4db8..57b176122 100644 --- a/bot/resources/tags/pep8.md +++ b/bot/resources/tags/pep8.md @@ -1,3 +1,5 @@ -**PEP 8** is the official style guide for Python. It includes comprehensive guidelines for code formatting, variable naming, and making your code easy to read. Professional Python developers are usually required to follow the guidelines, and will often use code-linters like `flake8` to verify that the code they\'re writing complies with the style guide. +**PEP 8** is the official style guide for Python. It includes comprehensive guidelines for code formatting, variable naming, and making your code easy to read. Professional Python developers are usually required to follow the guidelines, and will often use code-linters like flake8 to verify that the code they're writing complies with the style guide. -You can find the PEP 8 document [here](https://www.python.org/dev/peps/pep-0008). 
+More information: +• [PEP 8 document](https://www.python.org/dev/peps/pep-0008) +• [Our PEP 8 song!](https://www.youtube.com/watch?v=hgI0p1zf31k) :notes: -- cgit v1.2.3 From 1d72334d3dcff4d82dde3e9ca5a1edc0989114f2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 16 Feb 2021 02:06:49 +0100 Subject: Fix docstring typos --- bot/exts/info/doc/_batch_parser.py | 4 ++-- bot/exts/info/doc/_cog.py | 2 +- bot/exts/info/doc/_html.py | 2 +- bot/exts/info/doc/_parsing.py | 12 ++++++------ 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index b3f72bb89..b140843b6 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -33,7 +33,7 @@ class StaleInventoryNotifier: self._dev_log = bot.instance.get_channel(Channels.dev_log) async def send_warning(self, doc_item: _cog.DocItem) -> None: - """Send a warning to dev log is one wasn't already sent for `item`'s url.""" + """Send a warning to dev log if one wasn't already sent for `item`'s url.""" if doc_item.url not in self._warned_urls: self._warned_urls.add(doc_item.url) await self._init_task @@ -132,7 +132,7 @@ class BatchParser: async def _parse_queue(self) -> None: """ - Parse all item from the queue, setting their result markdown on the futures and sending them to redis. + Parse all items from the queue, setting their result Markdown on the futures and sending them to redis. The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished. """ diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index e2204bd4a..b6b9b2171 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -126,7 +126,7 @@ class DocCog(commands.Cog): inventory_url: str, ) -> None: """ - Update the cog's inventory, or reschedule this method to execute again if the remote inventory unreachable. 
+ Update the cog's inventory, or reschedule this method to execute again if the remote inventory is unreachable. The first attempt is rescheduled to execute in `FETCH_RESCHEDULE_DELAY.first` minutes, the subsequent attempts in `FETCH_RESCHEDULE_DELAY.repeated` minutes. diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py index f9fe542ce..2884a3cf1 100644 --- a/bot/exts/info/doc/_html.py +++ b/bot/exts/info/doc/_html.py @@ -100,7 +100,7 @@ def get_general_description(start_element: Tag) -> List[Union[Tag, NavigableStri """ Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. - A headerlink a tag is attempted to be found to skip repeating the symbol information in the description, + A headerlink tag is attempted to be found to skip repeating the symbol information in the description, if it's found it's used as the tag to start the search from instead of the `start_element`. """ child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 8e1b4d7a1..3350aac0a 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -106,7 +106,7 @@ def _split_parameters(parameters_string: str) -> Iterator[str]: def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collection[str]]: """ - Truncate passed signatures to not exceed `_MAX_SIGNAUTRES_LENGTH`. + Truncate passed signatures to not exceed `_MAX_SIGNATURES_LENGTH`. If the signatures need to be truncated, parameters are collapsed until they fit withing the limit. Individual signatures can consist of max 1, 2, ..., `_MAX_SIGNATURE_AMOUNT` lines of text, @@ -149,10 +149,10 @@ def _get_truncated_description( max_lines: int, ) -> str: """ - Truncate markdown from `elements` to be at most `max_length` characters when rendered or `max_lines` newlines. 
+ Truncate the Markdown from `elements` to be at most `max_length` characters when rendered or `max_lines` newlines. `max_length` limits the length of the rendered characters in the string, - with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits + with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits. """ result = "" markdown_element_ends = [] @@ -209,10 +209,10 @@ def _get_truncated_description( def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str: """ - Create a markdown string with the signatures at the top, and the converted html description below them. + Create a Markdown string with the signatures at the top, and the converted html description below them. The signatures are wrapped in python codeblocks, separated from the description by a newline. - The result markdown string is max 750 rendered characters for the description with signatures at the start. + The result Markdown string is max 750 rendered characters for the description with signatures at the start. """ description = _get_truncated_description( description, @@ -232,7 +232,7 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]: """ - Return parsed markdown of the passed item using the passed in soup, truncated to fit within a discord message. + Return parsed Markdown of the passed item using the passed in soup, truncated to fit within a discord message. The method of parsing and what information gets included depends on the symbol's group. 
""" -- cgit v1.2.3 From e607600cdd1084566319d4283bd747d772627121 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 19 Feb 2021 00:01:55 +0100 Subject: Simplify the _split_parameters implementation The main simplification was getting rid of keeping track of string depth which was unnecessary, as we can just always skip them as was being done for strings inside of brackets. The branching was also simplified to make sure less unnecessary checks were being done with a bit less confusing elifs. --- bot/exts/info/doc/_parsing.py | 48 ++++++++++++++++--------------------------- 1 file changed, 18 insertions(+), 30 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 3350aac0a..280a0c8f2 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -63,43 +63,31 @@ def _split_parameters(parameters_string: str) -> Iterator[str]: last_split = 0 depth = 0 current_search: Optional[BracketPair] = None - quote_character = None enumerated_string = enumerate(parameters_string) for index, character in enumerated_string: - if quote_character is None and character in _BRACKET_PAIRS: - if current_search is None: - current_search = _BRACKET_PAIRS[character] - depth = 1 - elif character == current_search.opening_bracket: - depth += 1 + if character in {"'", '"'}: + # Skip everything inside of strings, regardless of the depth. 
+ quote_character = character + for index, character in enumerated_string: + if character == quote_character and _is_closing_quote(parameters_string, index): + break - elif character in {"'", '"'}: - if current_search is not None: - # We're currently searching for a bracket, skip all characters that belong to the string - # to avoid false positives of closing brackets - quote_character = character - for index, character in enumerated_string: - if character == quote_character and _is_closing_quote(parameters_string, index): - break + elif current_search is None: + if (current_search := _BRACKET_PAIRS.get(character)) is not None: + depth = 1 + elif character == ",": + yield parameters_string[last_split:index] + last_split = index + 1 - elif depth == 0: + else: + if character == current_search.opening_bracket: depth += 1 - quote_character = character - elif character == quote_character: - if _is_closing_quote(parameters_string, index): - depth -= 1 - if depth == 0: - quote_character = None - elif current_search is not None and character == current_search.closing_bracket: - depth -= 1 - if depth == 0: - current_search = None - - elif depth == 0 and character == ",": - yield parameters_string[last_split:index] - last_split = index + 1 + elif character == current_search.closing_bracket: + depth -= 1 + if depth == 0: + current_search = None yield parameters_string[last_split:] -- cgit v1.2.3 From 6badbd4cb85bd688363ed5d57e174b43b4788f66 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 19 Feb 2021 14:39:34 +0100 Subject: Simplify condition --- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 280a0c8f2..173051650 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -101,7 +101,7 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec inversely proportional to 
the amount of signatures. A maximum of `_MAX_SIGNATURE_AMOUNT` signatures is assumed to be passed. """ - if not sum(len(signature) for signature in signatures) > _MAX_SIGNATURES_LENGTH: + if sum(len(signature) for signature in signatures) <= _MAX_SIGNATURES_LENGTH: return signatures max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (MAX_SIGNATURE_AMOUNT + 1 - len(signatures)) -- cgit v1.2.3 From 88e951f94aa0c1780e54798b5f3af72b75502ef4 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 20 Feb 2021 01:21:42 +0100 Subject: Use "inventories" when referring to the cog's collection of sphinx invs Previously in some context inventory referred both to a single "inventory" that we got from a remote objects.inv and to the internal cog inventories. Always referring to the cog's inventories as plural The update_single docstring was also changed from rebuild to build, as the method doesn't handle anything with a preexisting inventory with the same symbols being in the cog's inventories --- bot/exts/info/doc/_cog.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index b6b9b2171..ed67abb79 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -76,11 +76,11 @@ class DocCog(commands.Cog): async def init_refresh_inventory(self) -> None: """Refresh documentation inventory on cog initialization.""" await self.bot.wait_until_guild_available() - await self.refresh_inventory() + await self.refresh_inventories() def update_single(self, api_package_name: str, base_url: str, package: InventoryDict) -> None: """ - Rebuild the inventory for a single package. + Build the inventory for a single package. 
Where: * `package_name` is the package name to use, appears in the log @@ -126,7 +126,7 @@ class DocCog(commands.Cog): inventory_url: str, ) -> None: """ - Update the cog's inventory, or reschedule this method to execute again if the remote inventory is unreachable. + Update the cog's inventories, or reschedule this method to execute again if the remote inventory is unreachable. The first attempt is rescheduled to execute in `FETCH_RESCHEDULE_DELAY.first` minutes, the subsequent attempts in `FETCH_RESCHEDULE_DELAY.repeated` minutes. @@ -202,8 +202,8 @@ class DocCog(commands.Cog): self.renamed_symbols[symbol_name].append(new_symbol_name) return new_symbol_name - async def refresh_inventory(self) -> None: - """Refresh internal documentation inventory.""" + async def refresh_inventories(self) -> None: + """Refresh internal documentation inventories.""" self.refresh_event.clear() await self.symbol_get_event.wait() log.debug("Refreshing documentation inventory...") @@ -369,7 +369,7 @@ class DocCog(commands.Cog): ) self.update_single(package_name, base_url, inventory_dict) - await ctx.send(f"Added the package `{package_name}` to the database and refreshed the inventory.") + await ctx.send(f"Added the package `{package_name}` to the database and updated the inventories.") @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) @commands.has_any_role(*MODERATION_ROLES) @@ -386,9 +386,9 @@ class DocCog(commands.Cog): async with ctx.typing(): # Rebuild the inventory to ensure that everything # that was from this package is properly deleted. 
- await self.refresh_inventory() + await self.refresh_inventories() await doc_cache.delete(package_name) - await ctx.send(f"Successfully deleted `{package_name}` and refreshed the inventory.") + await ctx.send(f"Successfully deleted `{package_name}` and refreshed the inventories.") @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) @commands.has_any_role(*MODERATION_ROLES) @@ -397,7 +397,7 @@ class DocCog(commands.Cog): """Refresh inventories and show the difference.""" old_inventories = set(self.base_urls) with ctx.typing(): - await self.refresh_inventory() + await self.refresh_inventories() new_inventories = set(self.base_urls) if added := ", ".join(new_inventories - old_inventories): -- cgit v1.2.3 From e7d7f958b60045a447d5460e740a703654450a0c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 20 Feb 2021 01:23:41 +0100 Subject: Remove unnecessary comments The comments explain things that should be clear, or basic concepts --- bot/exts/info/doc/_cog.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ed67abb79..eb3de9d46 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -209,16 +209,11 @@ class DocCog(commands.Cog): log.debug("Refreshing documentation inventory...") self.inventory_scheduler.cancel_all() - # Clear the old base URLS and doc symbols to ensure - # that we start from a fresh local dataset. - # Also, reset the cache used for fetching documentation. self.base_urls.clear() self.doc_symbols.clear() self.renamed_symbols.clear() await self.item_fetcher.clear() - # Run all coroutines concurrently - since each of them performs an HTTP - # request, this speeds up fetching the inventory data heavily. 
coros = [ self.update_or_reschedule_inventory( package["package"], package["base_url"], package["inventory_url"] @@ -317,9 +312,6 @@ class DocCog(commands.Cog): else: symbol = symbol_name.strip("`") - # Fetching documentation for a symbol (at least for the first time, since - # caching is used) takes quite some time, so let's send typing to indicate - # that we got the command, but are still working on it. async with ctx.typing(): doc_embed = await self.get_symbol_embed(symbol) @@ -384,8 +376,6 @@ class DocCog(commands.Cog): await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') async with ctx.typing(): - # Rebuild the inventory to ensure that everything - # that was from this package is properly deleted. await self.refresh_inventories() await doc_cache.delete(package_name) await ctx.send(f"Successfully deleted `{package_name}` and refreshed the inventories.") -- cgit v1.2.3 From 30a39e9bed8ea9c50a6851504dead1d9c8ed7539 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 20 Feb 2021 01:27:07 +0100 Subject: cleanup->clean_up --- bot/exts/info/doc/_batch_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index b140843b6..9956878cf 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -93,7 +93,7 @@ class BatchParser: self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = {} self._parse_task = None - self.cleanup_futures_task = bot.instance.loop.create_task(self._cleanup_futures()) + self.cleanup_futures_task = bot.instance.loop.create_task(self._clean_up_futures()) self.stale_inventory_notifier = StaleInventoryNotifier() @@ -189,7 +189,7 @@ class BatchParser: self._page_doc_items.clear() self._item_futures.clear() - async def _cleanup_futures(self) -> None: + async def _clean_up_futures(self) -> None: """ Clear old futures from internal results. 
-- cgit v1.2.3 From 181aa5732b28181800cb663d52be42fd70c3226c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 20 Feb 2021 01:36:09 +0100 Subject: Add deletedoccache alias to the clear cache command --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index eb3de9d46..1e498237a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -402,7 +402,7 @@ class DocCog(commands.Cog): ) await ctx.send(embed=embed) - @docs_group.command(name="cleardoccache") + @docs_group.command(name="cleardoccache", aliases=("deletedoccache",)) @commands.has_any_role(*MODERATION_ROLES) async def clear_cache_command(self, ctx: commands.Context, package_name: PackageName) -> None: """Clear the persistent redis cache for `package`.""" -- cgit v1.2.3 From 70912a98bf29bcd8cc9052adb587380533ab5102 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Feb 2021 01:56:22 +0100 Subject: Add lt and gt angle brackets to bracket pairs --- bot/exts/info/doc/_parsing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 173051650..f6e25937e 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -42,6 +42,7 @@ _BRACKET_PAIRS = { "{": BracketPair("{", "}"), "(": BracketPair("(", ")"), "[": BracketPair("[", "]"), + "<": BracketPair("<", ">"), } -- cgit v1.2.3 From 8f6d11a7694d6dea50d94d7918f686834283c858 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Feb 2021 02:54:09 +0100 Subject: Add unittests for _split_signature --- tests/bot/exts/info/doc/__init__.py | 0 tests/bot/exts/info/doc/test_parsing.py | 59 +++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+) create mode 100644 tests/bot/exts/info/doc/__init__.py create mode 100644 
tests/bot/exts/info/doc/test_parsing.py diff --git a/tests/bot/exts/info/doc/__init__.py b/tests/bot/exts/info/doc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/bot/exts/info/doc/test_parsing.py b/tests/bot/exts/info/doc/test_parsing.py new file mode 100644 index 000000000..f302b38fc --- /dev/null +++ b/tests/bot/exts/info/doc/test_parsing.py @@ -0,0 +1,59 @@ +from unittest import TestCase + +from bot.exts.info.doc import _parsing as parsing + + +class SignatureSplitter(TestCase): + + def test_basic_split(self): + test_cases = ( + ("0,0,0", ["0", "0", "0"]), + ("0,a=0,a=0", ["0", "a=0", "a=0"]), + ) + self._run_tests(test_cases) + + def test_commas_ignored_in_brackets(self): + test_cases = ( + ("0,[0,0],0,[0,0],0", ["0", "[0,0]", "0", "[0,0]", "0"]), + ("(0,),0,(0,(0,),0),0", ["(0,)", "0", "(0,(0,),0)", "0"]), + ) + self._run_tests(test_cases) + + def test_mixed_brackets(self): + tests_cases = ( + ("[0,{0},0],0,{0:0},0", ["[0,{0},0]", "0", "{0:0}", "0"]), + ("([0],0,0),0,(0,0),0", ["([0],0,0)", "0", "(0,0)", "0"]), + ("([(0,),(0,)],0),0", ["([(0,),(0,)],0)", "0"]), + ) + self._run_tests(tests_cases) + + def test_string_contents_ignored(self): + test_cases = ( + ("'0,0',0,',',0", ["'0,0'", "0", "','", "0"]), + ("0,[']',0],0", ["0", "[']',0]", "0"]), + ("{0,0,'}}',0,'{'},0", ["{0,0,'}}',0,'{'}", "0"]), + ) + self._run_tests(test_cases) + + def test_mixed_quotes(self): + test_cases = ( + ("\"0',0',\",'0,0',0", ["\"0',0',\"", "'0,0'", "0"]), + ("\",',\",'\",',0", ["\",',\"", "'\",'", "0"]), + ) + self._run_tests(test_cases) + + def test_real_signatures(self): + test_cases = ( + ("start, stop[, step]", ["start", " stop[, step]"]), + ("object=b'', encoding='utf-8', errors='strict'", ["object=b''", " encoding='utf-8'", " errors='strict'"]), + ( + "typename, field_names, *, rename=False, defaults=None, module=None", + ["typename", " field_names", " *", " rename=False", " defaults=None", " module=None"] + ), + ) + self._run_tests(test_cases) 
+ + def _run_tests(self, test_cases): + for input_string, expected_output in test_cases: + with self.subTest(input_string=input_string): + self.assertEqual(list(parsing._split_parameters(input_string)), expected_output) -- cgit v1.2.3 From cec78fd1eb3b8da00fe8d2c5057fbbb417ac0255 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Feb 2021 03:04:32 +0100 Subject: Correct length adjustment the placeholder contains a space and is actually 4 chars because of that with a comma that adds up to 5 characters in the signature instead of 4 --- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index f6e25937e..c26af8ac3 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -118,7 +118,7 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec parameters_string = parameters_match[1] running_length = len(signature) - len(parameters_string) for parameter in _split_parameters(parameters_string): - if (len(parameter) + running_length) <= max_signature_length - 4: # account for comma and placeholder + if (len(parameter) + running_length) <= max_signature_length - 5: # account for comma and placeholder truncated_signature.append(parameter) running_length += len(parameter) + 1 else: -- cgit v1.2.3 From 95f6dd89a528be327e9c52b47948e3d8138590ed Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 21 Feb 2021 03:17:57 +0100 Subject: Remove redundant group check As we check for non dt symbol names to be sure in case something pops up , we can skip the initial group check as all the symbols from those groups should point to non dt tags. 
--- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index c26af8ac3..f9b4f9d8a 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -231,7 +231,7 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[s signature = None # Modules, doc pages and labels don't point to description list tags but to tags like divs, # no special parsing can be done so we only try to include what's under them. - if symbol_data.group in {"module", "doc", "label"} or symbol_heading.name != "dt": + if symbol_heading.name != "dt": description = get_general_description(symbol_heading) elif symbol_data.group in _NO_SIGNATURE_GROUPS: -- cgit v1.2.3 From d620a38bd03b7452474e12dc8a8531a868e7055d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 23 Feb 2021 03:31:39 +0100 Subject: Update docstrings --- bot/exts/info/doc/_cog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 1e498237a..0b0611cbc 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -86,7 +86,7 @@ class DocCog(commands.Cog): * `package_name` is the package name to use, appears in the log * `base_url` is the root documentation URL for the specified package, used to build absolute paths that link to specific symbols - * `inventory_url` is the absolute URL to the intersphinx inventory. + * `package` are the InventoryDict contents of a intersphinx inventory. """ self.base_urls[api_package_name] = base_url @@ -225,7 +225,7 @@ class DocCog(commands.Cog): async def get_symbol_embed(self, symbol_name: str) -> Optional[discord.Embed]: """ - Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. 
+ Attempt to scrape and fetch the data for the given `symbol_name`, and build an embed from its contents. If the symbol is known, an Embed with documentation about it is returned. -- cgit v1.2.3 From 0b11d7dfb408f4e5fe6248ae8377ddc7aa1aa5ee Mon Sep 17 00:00:00 2001 From: Gustav Odinger <65498475+gustavwilliam@users.noreply.github.com> Date: Tue, 23 Feb 2021 03:48:35 +0100 Subject: Add truncate_message util --- bot/utils/messages.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/bot/utils/messages.py b/bot/utils/messages.py index 077dd9569..c01fa5d0e 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -154,3 +154,12 @@ async def send_denial(ctx: Context, reason: str) -> None: def format_user(user: discord.abc.User) -> str: """Return a string for `user` which has their mention and ID.""" return f"{user.mention} (`{user.id}`)" + + +def truncate_message(message: discord.Message, limit: int) -> str: + """Returns a truncated version of the message content, up to the specified limit.""" + text = message.content + if len(text) > limit: + return text[:limit-3] + "..." 
+ else: + return text -- cgit v1.2.3 From e1d269d82eed8a01d3d3b0ff33d05e3c79324007 Mon Sep 17 00:00:00 2001 From: Gustav Odinger <65498475+gustavwilliam@users.noreply.github.com> Date: Tue, 23 Feb 2021 04:00:01 +0100 Subject: Add function to DM users when opening help channel --- bot/exts/help_channels/_message.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 2bbd4bdd6..12ac4035d 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -8,6 +8,7 @@ import bot from bot import constants from bot.exts.help_channels import _caches from bot.utils.channel import is_in_category +from bot.utils.messages import truncate_message log = logging.getLogger(__name__) @@ -92,6 +93,38 @@ async def is_empty(channel: discord.TextChannel) -> bool: return False +async def dm_on_open(message: discord.Message) -> None: + """ + DM claimant with a link to the claimed channel's first message, with a 100 letter preview of the message. + + Does nothing if the user has DMs disabled. + """ + embed = discord.Embed( + title="Help channel opened", + description=f"You claimed {message.channel.mention}.", + colour=bot.constants.Colours.bright_green, + timestamp=message.created_at, + ) + + embed.set_thumbnail(url=constants.Icons.green_questionmark) + embed.add_field( + name="Your message", value=truncate_message(message, limit=100), inline=False + ) + embed.add_field( + name="Want to go there?", + value=f"[Jump to message!]({message.jump_url})", + inline=False, + ) + + try: + await message.author.send(embed=embed) + log.trace(f"Sent DM to {message.author.id} after claiming help channel.") + except discord.errors.Forbidden: + log.trace( + f"Ignoring to send DM to {message.author.id} after claiming help channel: DMs disabled." 
+ ) + + async def notify(channel: discord.TextChannel, last_notification: t.Optional[datetime]) -> t.Optional[datetime]: """ Send a message in `channel` notifying about a lack of available help channels. -- cgit v1.2.3 From e6483d633ac6ecc2a88051442108d9c88e5f7745 Mon Sep 17 00:00:00 2001 From: Gustav Odinger <65498475+gustavwilliam@users.noreply.github.com> Date: Tue, 23 Feb 2021 04:00:58 +0100 Subject: Add green question mark to default config Add green question mark to default config Add green question mark to config --- bot/constants.py | 1 + config-default.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/bot/constants.py b/bot/constants.py index 8a93ff9cf..69bc82b89 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -326,6 +326,7 @@ class Icons(metaclass=YAMLGetter): filtering: str green_checkmark: str + green_questionmark: str guild_update: str hash_blurple: str diff --git a/config-default.yml b/config-default.yml index 8e9a29a51..7d9afaa0e 100644 --- a/config-default.yml +++ b/config-default.yml @@ -90,6 +90,7 @@ style: filtering: "https://cdn.discordapp.com/emojis/472472638594482195.png" green_checkmark: "https://raw.githubusercontent.com/python-discord/branding/master/icons/checkmark/green-checkmark-dist.png" + green_questionmark: "https://raw.githubusercontent.com/python-discord/branding/master/icons/checkmark/green-question-mark-dist.png" guild_update: "https://cdn.discordapp.com/emojis/469954765141442561.png" hash_blurple: "https://cdn.discordapp.com/emojis/469950142942806017.png" -- cgit v1.2.3 From e34ea2f1c108d1900e251d17b38563536345d2de Mon Sep 17 00:00:00 2001 From: Gustav Odinger <65498475+gustavwilliam@users.noreply.github.com> Date: Tue, 23 Feb 2021 04:07:05 +0100 Subject: Send DM when user claims help channel --- bot/exts/help_channels/_cog.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 0995c8a79..a18ddc900 100644 --- a/bot/exts/help_channels/_cog.py +++ 
b/bot/exts/help_channels/_cog.py @@ -102,6 +102,7 @@ class HelpChannels(commands.Cog): await _cooldown.revoke_send_permissions(message.author, self.scheduler) await _message.pin(message) + await _message.dm_on_open(message) # Add user with channel for dormant check. await _caches.claimants.set(message.channel.id, message.author.id) -- cgit v1.2.3 From bb9e56c3cb874ef76ab82db02ce8242117e0da92 Mon Sep 17 00:00:00 2001 From: Gustav Odinger <65498475+gustavwilliam@users.noreply.github.com> Date: Tue, 23 Feb 2021 11:08:41 +0100 Subject: Update embed field title to be more formal --- bot/exts/help_channels/_message.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 12ac4035d..95aca067a 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -111,7 +111,7 @@ async def dm_on_open(message: discord.Message) -> None: name="Your message", value=truncate_message(message, limit=100), inline=False ) embed.add_field( - name="Want to go there?", + name="Conversation", value=f"[Jump to message!]({message.jump_url})", inline=False, ) -- cgit v1.2.3 From cae0d84757e026976f1a9e87d52c581669b7b8e8 Mon Sep 17 00:00:00 2001 From: Gustav Odinger <65498475+gustavwilliam@users.noreply.github.com> Date: Tue, 23 Feb 2021 11:14:31 +0100 Subject: Use textwrap.shorten instead of custom function This applies to the help channel DM embed, where the user is sent a truncated version of their message. 
--- bot/exts/help_channels/_message.py | 6 ++++-- bot/utils/messages.py | 9 --------- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 95aca067a..4113e51c5 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -1,4 +1,5 @@ import logging +import textwrap import typing as t from datetime import datetime @@ -8,7 +9,6 @@ import bot from bot import constants from bot.exts.help_channels import _caches from bot.utils.channel import is_in_category -from bot.utils.messages import truncate_message log = logging.getLogger(__name__) @@ -108,7 +108,9 @@ async def dm_on_open(message: discord.Message) -> None: embed.set_thumbnail(url=constants.Icons.green_questionmark) embed.add_field( - name="Your message", value=truncate_message(message, limit=100), inline=False + name="Your message", + value=textwrap.shorten(message.content, width=100, placeholder="..."), + inline=False, ) embed.add_field( name="Conversation", diff --git a/bot/utils/messages.py b/bot/utils/messages.py index c01fa5d0e..077dd9569 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -154,12 +154,3 @@ async def send_denial(ctx: Context, reason: str) -> None: def format_user(user: discord.abc.User) -> str: """Return a string for `user` which has their mention and ID.""" return f"{user.mention} (`{user.id}`)" - - -def truncate_message(message: discord.Message, limit: int) -> str: - """Returns a truncated version of the message content, up to the specified limit.""" - text = message.content - if len(text) > limit: - return text[:limit-3] + "..." 
- else: - return text -- cgit v1.2.3 From d71ac9f6e240ffd2d4195d9dbbf5740a0c2413a1 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Tue, 23 Feb 2021 19:24:18 +0300 Subject: Fixes Problems With Help Channel DM Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/exts/help_channels/_cog.py | 5 ++++- bot/exts/help_channels/_message.py | 8 +++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index a18ddc900..6abf99810 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -102,7 +102,10 @@ class HelpChannels(commands.Cog): await _cooldown.revoke_send_permissions(message.author, self.scheduler) await _message.pin(message) - await _message.dm_on_open(message) + try: + await _message.dm_on_open(message) + except Exception as e: + log.warning("Error occurred while sending DM:", exc_info=e) # Add user with channel for dormant check. 
await _caches.claimants.set(message.channel.id, message.author.id) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 4113e51c5..36388f9bd 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -107,11 +107,9 @@ async def dm_on_open(message: discord.Message) -> None: ) embed.set_thumbnail(url=constants.Icons.green_questionmark) - embed.add_field( - name="Your message", - value=textwrap.shorten(message.content, width=100, placeholder="..."), - inline=False, - ) + formatted_message = textwrap.shorten(message.content, width=100, placeholder="...") + if formatted_message: + embed.add_field(name="Your message", value=formatted_message, inline=False) embed.add_field( name="Conversation", value=f"[Jump to message!]({message.jump_url})", -- cgit v1.2.3 From c29cda2e96c73518517acefa77e967f791ec87c5 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Wed, 24 Feb 2021 09:36:52 +0200 Subject: Remove compatibility-none returns from filters This doesn't look good to have just None returns, so made changes so if function don't return tuple, then reason is automatically None. --- bot/exts/filters/filtering.py | 51 ++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 27 deletions(-) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 2e8552f59..4093ba4ad 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -271,9 +271,17 @@ class Filtering(Cog): # Does the filter only need the message content or the full message? 
if _filter["content_only"]: - match, reason = await _filter["function"](msg.content) + payload = msg.content else: - match, reason = await _filter["function"](msg) + payload = msg + + result = await _filter["function"](payload) + reason = None + + if isinstance(result, tuple): + match, reason = result + else: + match = result if match: is_private = msg.channel.type is discord.ChannelType.private @@ -461,16 +469,15 @@ class Filtering(Cog): return False, None @staticmethod - async def _has_zalgo(text: str) -> Tuple[bool, None]: + async def _has_zalgo(text: str) -> bool: """ Returns True if the text contains zalgo characters. Zalgo range is \u0300 – \u036F and \u0489. - Return None as second value for compability with other filters. """ - return bool(ZALGO_RE.search(text)), None + return bool(ZALGO_RE.search(text)) - async def _has_invites(self, text: str) -> Tuple[Union[dict, bool], None]: + async def _has_invites(self, text: str) -> Union[dict, bool]: """ Checks if there's any invites in the text content that aren't in the guild whitelist. @@ -478,8 +485,6 @@ class Filtering(Cog): If none are detected, False is returned. Attempts to catch some of common ways to try to cheat the system. - - Return None as second value for compability with other filters. """ # Remove backslashes to prevent escape character aroundfuckery like # discord\.gg/gdudes-pony-farm @@ -500,7 +505,7 @@ class Filtering(Cog): # Lack of a "guild" key in the JSON response indicates either an group DM invite, an # expired invite, or an invalid invite. 
The API does not currently differentiate # between invalid and expired invites - return True, None + return True guild_id = guild.get("id") guild_invite_whitelist = self._get_filterlist_items("guild_invite", allowed=True) @@ -537,15 +542,11 @@ class Filtering(Cog): "reason": reason } - return invite_data if invite_data else False, None + return invite_data if invite_data else False @staticmethod - async def _has_rich_embed(msg: Message) -> Tuple[Union[bool, List[discord.Embed]], None]: - """ - Determines if `msg` contains any rich embeds not auto-generated from a URL. - - Return None as second value for compability with other filters. - """ + async def _has_rich_embed(msg: Message) -> Union[bool, List[discord.Embed]]: + """Determines if `msg` contains any rich embeds not auto-generated from a URL.""" if msg.embeds: for embed in msg.embeds: if embed.type == "rich": @@ -553,28 +554,24 @@ class Filtering(Cog): if not embed.url or embed.url not in urls: # If `embed.url` does not exist or if `embed.url` is not part of the content # of the message, it's unlikely to be an auto-generated embed by Discord. - return msg.embeds, None + return msg.embeds else: log.trace( "Found a rich embed sent by a regular user account, " "but it was likely just an automatic URL embed." ) - return False, None - return False, None + return False + return False @staticmethod - async def _has_everyone_ping(text: str) -> Tuple[bool, None]: - """ - Determines if `msg` contains an @everyone or @here ping outside of a codeblock. - - Return None as second value for compability with other filters. 
- """ + async def _has_everyone_ping(text: str) -> bool: + """Determines if `msg` contains an @everyone or @here ping outside of a codeblock.""" # First pass to avoid running re.sub on every message if not EVERYONE_PING_RE.search(text): - return False, None + return False content_without_codeblocks = CODE_BLOCK_RE.sub("", text) - return bool(EVERYONE_PING_RE.search(content_without_codeblocks)), None + return bool(EVERYONE_PING_RE.search(content_without_codeblocks)) async def notify_member(self, filtered_member: Member, reason: str, channel: TextChannel) -> None: """ -- cgit v1.2.3 From cbd6054956cf3bd0646e0577f510faebf077cfe5 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 24 Feb 2021 14:23:59 +0100 Subject: Allow wildcard match for clear cache command --- bot/exts/info/doc/_cog.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 0b0611cbc..09ce04dd4 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -7,14 +7,14 @@ import textwrap from collections import defaultdict from contextlib import suppress from types import SimpleNamespace -from typing import Dict, NamedTuple, Optional +from typing import Dict, NamedTuple, Optional, Union import discord from discord.ext import commands from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput -from bot.converters import Inventory, PackageName, ValidURL +from bot.converters import Inventory, PackageName, ValidURL, allowed_strings from bot.pagination import LinePaginator from bot.utils.lock import SharedEvent, lock from bot.utils.messages import send_denial, wait_for_deletion @@ -404,7 +404,11 @@ class DocCog(commands.Cog): @docs_group.command(name="cleardoccache", aliases=("deletedoccache",)) @commands.has_any_role(*MODERATION_ROLES) - async def clear_cache_command(self, ctx: commands.Context, package_name: PackageName) -> None: + async def 
clear_cache_command( + self, + ctx: commands.Context, + package_name: Union[PackageName, allowed_strings("*")] # noqa: F722 + ) -> None: """Clear the persistent redis cache for `package`.""" if await doc_cache.delete(package_name): await ctx.send(f"Successfully cleared the cache for `{package_name}`.") -- cgit v1.2.3 From 44eb00ca03dae1b3d5faf40be63fae04ca515790 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Wed, 24 Feb 2021 18:27:25 +0100 Subject: Add off-topic etiquette to the off-topic tag --- bot/resources/tags/off-topic.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/resources/tags/off-topic.md b/bot/resources/tags/off-topic.md index c7f98a813..6a864a1d5 100644 --- a/bot/resources/tags/off-topic.md +++ b/bot/resources/tags/off-topic.md @@ -6,3 +6,5 @@ There are three off-topic channels: • <#463035268514185226> Their names change randomly every 24 hours, but you can always find them under the `OFF-TOPIC/GENERAL` category in the channel list. + +Please read our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) before participating in conversations. 
-- cgit v1.2.3 From d65d130f1efb2dc5b4a72f025cd7abb3371bd663 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 24 Feb 2021 14:27:08 +0100 Subject: Use 4 spaces for hanging indent --- bot/exts/info/doc/_cog.py | 18 +++++++++--------- bot/exts/info/doc/_html.py | 12 ++++++------ bot/exts/info/doc/_parsing.py | 8 ++++---- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 09ce04dd4..95a772df3 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -120,10 +120,10 @@ class DocCog(commands.Cog): log.trace(f"Fetched inventory for {api_package_name}.") async def update_or_reschedule_inventory( - self, - api_package_name: str, - base_url: str, - inventory_url: str, + self, + api_package_name: str, + base_url: str, + inventory_url: str, ) -> None: """ Update the cog's inventories, or reschedule this method to execute again if the remote inventory is unreachable. @@ -149,11 +149,11 @@ class DocCog(commands.Cog): self.update_single(api_package_name, base_url, package) def ensure_unique_symbol_name( - self, - package_name: str, - group_name: str, - original_item: DocItem, - symbol_name: str, + self, + package_name: str, + group_name: str, + original_item: DocItem, + symbol_name: str, ) -> Optional[str]: """ Ensure `symbol_name` doesn't overwrite an another symbol in `doc_symbols`. 
diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py index 2884a3cf1..701684b88 100644 --- a/bot/exts/info/doc/_html.py +++ b/bot/exts/info/doc/_html.py @@ -46,12 +46,12 @@ class Strainer(SoupStrainer): def _find_elements_until_tag( - start_element: PageElement, - end_tag_filter: Union[Container[str], Callable[[Tag], bool]], - *, - func: Callable, - include_strings: bool = False, - limit: int = None, + start_element: PageElement, + end_tag_filter: Union[Container[str], Callable[[Tag], bool]], + *, + func: Callable, + include_strings: bool = False, + limit: int = None, ) -> List[Union[Tag, NavigableString]]: """ Get all elements up to `limit` or until a tag matching `tag_filter` is found. diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index f9b4f9d8a..6b2d31cdd 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -132,10 +132,10 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec def _get_truncated_description( - elements: Iterable[Union[Tag, NavigableString]], - markdown_converter: DocMarkdownConverter, - max_length: int, - max_lines: int, + elements: Iterable[Union[Tag, NavigableString]], + markdown_converter: DocMarkdownConverter, + max_length: int, + max_lines: int, ) -> str: """ Truncate the Markdown from `elements` to be at most `max_length` characters when rendered or `max_lines` newlines. 
-- cgit v1.2.3 From 3153ad05222156b4aacb0f288d36ec3a1ab6eda1 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 25 Feb 2021 04:23:54 +0100 Subject: Send command help for `BadUnionArgument`s --- bot/exts/backend/error_handler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py index d2cce5558..92414e5d2 100644 --- a/bot/exts/backend/error_handler.py +++ b/bot/exts/backend/error_handler.py @@ -239,6 +239,7 @@ class ErrorHandler(Cog): elif isinstance(e, errors.BadUnionArgument): embed = self._get_error_embed("Bad argument", f"{e}\n{e.errors[-1]}") await ctx.send(embed=embed) + await prepared_help_command self.bot.stats.incr("errors.bad_union_argument") elif isinstance(e, errors.ArgumentParsingError): embed = self._get_error_embed("Argument parsing error", str(e)) -- cgit v1.2.3 From d56b01ad1d9d2b52603255978061c95a2487505a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 25 Feb 2021 04:24:20 +0100 Subject: Close coroutine to prevent `RuntimeWarning`s --- bot/exts/backend/error_handler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py index 92414e5d2..9cb54cdab 100644 --- a/bot/exts/backend/error_handler.py +++ b/bot/exts/backend/error_handler.py @@ -244,6 +244,7 @@ class ErrorHandler(Cog): elif isinstance(e, errors.ArgumentParsingError): embed = self._get_error_embed("Argument parsing error", str(e)) await ctx.send(embed=embed) + prepared_help_command.close() self.bot.stats.incr("errors.argument_parsing_error") else: embed = self._get_error_embed( -- cgit v1.2.3 From 47ed2339e55eb2a0bc245b45c1f0df9cc8b9af36 Mon Sep 17 00:00:00 2001 From: swfarnsworth Date: Thu, 25 Feb 2021 00:53:12 -0500 Subject: Instructions to dispute an infraction vary by infraction type. 
Previously, the user was instructed to email the appeals email for infraction types that don't remove one from the server. They are now instructed to DM ModMail except for Ban-type infractions. Also removed the URL string literal from the hyperlink to that URL. --- bot/exts/moderation/infraction/_utils.py | 44 +++++++++++----------- tests/bot/exts/moderation/infraction/test_utils.py | 6 +-- 2 files changed, 26 insertions(+), 24 deletions(-) diff --git a/bot/exts/moderation/infraction/_utils.py b/bot/exts/moderation/infraction/_utils.py index e766c1e5c..e58c2b22f 100644 --- a/bot/exts/moderation/infraction/_utils.py +++ b/bot/exts/moderation/infraction/_utils.py @@ -22,7 +22,6 @@ INFRACTION_ICONS = { "voice_ban": (Icons.voice_state_red, Icons.voice_state_green), } RULES_URL = "https://pythondiscord.com/pages/rules" -APPEALABLE_INFRACTIONS = ("ban", "mute", "voice_ban") # Type aliases UserObject = t.Union[discord.Member, discord.User] @@ -31,8 +30,10 @@ Infraction = t.Dict[str, t.Union[str, int, bool]] APPEAL_EMAIL = "appeals@pythondiscord.com" -INFRACTION_TITLE = f"Please review our rules over at {RULES_URL}" -INFRACTION_APPEAL_FOOTER = f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}" +INFRACTION_TITLE = "Please review our rules" +INFRACTION_APPEAL_EMAIL_FOOTER = f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}" +INFRACTION_APPEAL_MODMAIL_FOOTER = ('If you would like to discuss or appeal this infraction, ' + 'send a message to the ModMail bot') INFRACTION_AUTHOR_NAME = "Infraction information" INFRACTION_DESCRIPTION_TEMPLATE = ( @@ -71,13 +72,13 @@ async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]: async def post_infraction( - ctx: Context, - user: UserSnowflake, - infr_type: str, - reason: str, - expires_at: datetime = None, - hidden: bool = False, - active: bool = True + ctx: Context, + user: UserSnowflake, + infr_type: str, + reason: str, + expires_at: datetime = None, + hidden: bool = False, + active: bool = 
True ) -> t.Optional[dict]: """Posts an infraction to the API.""" if isinstance(user, (discord.Member, discord.User)) and user.bot: @@ -150,11 +151,11 @@ async def get_active_infraction( async def notify_infraction( - user: UserObject, - infr_type: str, - expires_at: t.Optional[str] = None, - reason: t.Optional[str] = None, - icon_url: str = Icons.token_removed + user: UserObject, + infr_type: str, + expires_at: t.Optional[str] = None, + reason: t.Optional[str] = None, + icon_url: str = Icons.token_removed ) -> bool: """DM a user about their new infraction and return True if the DM is successful.""" log.trace(f"Sending {user} a DM about their {infr_type} infraction.") @@ -178,17 +179,18 @@ async def notify_infraction( embed.title = INFRACTION_TITLE embed.url = RULES_URL - if infr_type in APPEALABLE_INFRACTIONS: - embed.set_footer(text=INFRACTION_APPEAL_FOOTER) + embed.set_footer( + text=INFRACTION_APPEAL_EMAIL_FOOTER if infr_type == 'Ban' else INFRACTION_APPEAL_MODMAIL_FOOTER + ) return await send_private_embed(user, embed) async def notify_pardon( - user: UserObject, - title: str, - content: str, - icon_url: str = Icons.user_verified + user: UserObject, + title: str, + content: str, + icon_url: str = Icons.user_verified ) -> bool: """DM a user about their pardoned infraction and return True if the DM is successful.""" log.trace(f"Sending {user} a DM about their pardoned infraction.") diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py index 5b62463e0..ef6127344 100644 --- a/tests/bot/exts/moderation/infraction/test_utils.py +++ b/tests/bot/exts/moderation/infraction/test_utils.py @@ -146,7 +146,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.token_removed - ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER), + ).set_footer(text=utils.INFRACTION_APPEAL_EMAIL_FOOTER), "send_result": True }, { @@ -200,7 +200,7 @@ 
class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER), + ).set_footer(text=utils.INFRACTION_APPEAL_EMAIL_FOOTER), "send_result": False }, { @@ -218,7 +218,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER), + ).set_footer(text=utils.INFRACTION_APPEAL_EMAIL_FOOTER), "send_result": True } ] -- cgit v1.2.3 From c5e113734d16e8d5ac2eede6c1f29e019cfc2f28 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Thu, 25 Feb 2021 12:58:15 +0300 Subject: Adds More Descriptive Startup Error Messages Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/__main__.py | 24 ++++++++++++++++++++---- bot/bot.py | 13 ++++++++++++- 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/bot/__main__.py b/bot/__main__.py index 257216fa7..e4df4b77d 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -1,10 +1,26 @@ +import logging + +import aiohttp + import bot from bot import constants -from bot.bot import Bot +from bot.bot import Bot, StartupError from bot.log import setup_sentry setup_sentry() -bot.instance = Bot.create() -bot.instance.load_extensions() -bot.instance.run(constants.Bot.token) +try: + bot.instance = Bot.create() + bot.instance.load_extensions() + bot.instance.run(constants.Bot.token) +except StartupError as e: + message = "Unknown Startup Error Occurred." + if isinstance(e.exception, aiohttp.ClientConnectorError): + message = "Could not connect to site API. Is it running?" + elif isinstance(e.exception, OSError): + message = "Could not connect to Redis. Is it running?" 
+ + # The exception is logged with an empty message so the actual message is visible at the bottom + log = logging.getLogger("bot") + log.fatal("", exc_info=e.exception) + log.fatal(message) diff --git a/bot/bot.py b/bot/bot.py index d5f108575..df80868ee 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -19,6 +19,14 @@ log = logging.getLogger('bot') LOCALHOST = "127.0.0.1" +class StartupError(Exception): + """Exception class for startup errors.""" + + def __init__(self, base: Exception): + super() + self.exception = base + + class Bot(commands.Bot): """A subclass of `discord.ext.commands.Bot` with an aiohttp session and an API client.""" @@ -318,5 +326,8 @@ def _create_redis_session(loop: asyncio.AbstractEventLoop) -> RedisSession: use_fakeredis=constants.Redis.use_fakeredis, global_namespace="bot", ) - loop.run_until_complete(redis_session.connect()) + try: + loop.run_until_complete(redis_session.connect()) + except OSError as e: + raise StartupError(e) return redis_session -- cgit v1.2.3 From 866f3156cb05e49a8ca2c9ebdb13688829f15914 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Thu, 25 Feb 2021 12:59:37 +0300 Subject: Adds Site Readiness Checks Attempts to connect to the site multiple times before throwing an exception to allow the site to warm up when running in docker. 
Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/bot.py | 21 +++++++++++++++++++++ bot/constants.py | 2 ++ config-default.yml | 2 ++ 3 files changed, 25 insertions(+) diff --git a/bot/bot.py b/bot/bot.py index df80868ee..cd8e26325 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -89,6 +89,22 @@ class Bot(commands.Bot): for item in full_cache: self.insert_item_into_filter_list_cache(item) + async def ping_services(self) -> None: + """A helper to make sure all the services the bot relies on are available on startup.""" + # Connect Site/API + attempts = 0 + while True: + try: + log.info(f"Attempting site connection: {attempts + 1}/{constants.URLs.connect_max_retries}") + await self.api_client.get("healthcheck") + break + + except aiohttp.ClientConnectorError as e: + attempts += 1 + if attempts == constants.URLs.connect_max_retries: + raise e + await asyncio.sleep(constants.URLs.connect_cooldown) + @classmethod def create(cls) -> "Bot": """Create and return an instance of a Bot.""" @@ -231,6 +247,11 @@ class Bot(commands.Bot): # here. Normally, this shouldn't happen. 
await self.redis_session.connect() + try: + await self.ping_services() + except Exception as e: + raise StartupError(e) + # Build the FilterList cache await self.cache_filter_list_data() diff --git a/bot/constants.py b/bot/constants.py index 69bc82b89..7cf31e835 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -531,6 +531,8 @@ class URLs(metaclass=YAMLGetter): github_bot_repo: str # Base site vars + connect_max_retries: int + connect_cooldown: int site: str site_api: str site_schema: str diff --git a/config-default.yml b/config-default.yml index 7d9afaa0e..a9fb2262e 100644 --- a/config-default.yml +++ b/config-default.yml @@ -338,6 +338,8 @@ keys: urls: # PyDis site vars + connect_max_retries: 3 + connect_cooldown: 5 site: &DOMAIN "pythondiscord.com" site_api: &API "pydis-api.default.svc.cluster.local" site_api_schema: "http://" -- cgit v1.2.3 From 900923cc6a9b4d40b625b8f33e8bef18a286a84f Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Thu, 25 Feb 2021 13:23:14 +0300 Subject: Catches All Site Startup Issues Adds a missing exception when trying to connect to the site on startup. Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/__main__.py | 2 +- bot/bot.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/__main__.py b/bot/__main__.py index e4df4b77d..d3abcd7b2 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -15,7 +15,7 @@ try: bot.instance.run(constants.Bot.token) except StartupError as e: message = "Unknown Startup Error Occurred." - if isinstance(e.exception, aiohttp.ClientConnectorError): + if type(e.exception) in [aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError]: message = "Could not connect to site API. Is it running?" elif isinstance(e.exception, OSError): message = "Could not connect to Redis. Is it running?" 
diff --git a/bot/bot.py b/bot/bot.py index cd8e26325..1a815c31e 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -99,7 +99,7 @@ class Bot(commands.Bot): await self.api_client.get("healthcheck") break - except aiohttp.ClientConnectorError as e: + except (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError) as e: attempts += 1 if attempts == constants.URLs.connect_max_retries: raise e -- cgit v1.2.3 From 4c566bb2445d0bc637e11242c44a69baa8a39e48 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Thu, 25 Feb 2021 13:42:22 +0300 Subject: Cleans Up Startup Error Handler Code Style Co-authored-by: Akarys42 Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/__main__.py | 4 +++- bot/bot.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/bot/__main__.py b/bot/__main__.py index d3abcd7b2..9317563c8 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -15,7 +15,7 @@ try: bot.instance.run(constants.Bot.token) except StartupError as e: message = "Unknown Startup Error Occurred." - if type(e.exception) in [aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError]: + if isinstance(e.exception, (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError)): message = "Could not connect to site API. Is it running?" elif isinstance(e.exception, OSError): message = "Could not connect to Redis. Is it running?" 
@@ -24,3 +24,5 @@ except StartupError as e: log = logging.getLogger("bot") log.fatal("", exc_info=e.exception) log.fatal(message) + + exit(69) diff --git a/bot/bot.py b/bot/bot.py index 1a815c31e..3218a60b4 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -102,7 +102,7 @@ class Bot(commands.Bot): except (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError) as e: attempts += 1 if attempts == constants.URLs.connect_max_retries: - raise e + raise await asyncio.sleep(constants.URLs.connect_cooldown) @classmethod -- cgit v1.2.3 From 283857f543ca50e188f39a9b880cef9963f486db Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Thu, 25 Feb 2021 13:44:18 +0300 Subject: Call Super __init__ in Startup Error Co-authored-by: Matteo Bertucci --- bot/bot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/bot.py b/bot/bot.py index 3218a60b4..1b4037076 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -23,7 +23,7 @@ class StartupError(Exception): """Exception class for startup errors.""" def __init__(self, base: Exception): - super() + super().__init__() self.exception = base -- cgit v1.2.3 From fb7e21a0897e6de4964ff883f1cd52a9dd443722 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Thu, 25 Feb 2021 13:48:52 +0300 Subject: Removes Unused Variable Signed-off-by: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> --- bot/bot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/bot.py b/bot/bot.py index 1b4037076..3a2af472d 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -99,7 +99,7 @@ class Bot(commands.Bot): await self.api_client.get("healthcheck") break - except (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError) as e: + except (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError): attempts += 1 if attempts == constants.URLs.connect_max_retries: raise -- cgit v1.2.3 From 
ad2bc5d2d1d94ac3ef60d9b60e6f716be5827bf2 Mon Sep 17 00:00:00 2001 From: Sebastian Kuipers <61157793+sebkuip@users.noreply.github.com> Date: Thu, 25 Feb 2021 17:17:00 +0100 Subject: Apply suggestions from code review Co-authored-by: Mark --- bot/resources/tags/empty-json.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md index 93e2cadba..935544bb7 100644 --- a/bot/resources/tags/empty-json.md +++ b/bot/resources/tags/empty-json.md @@ -4,8 +4,8 @@ JSONDecodeError: Expecting value: line 1 column 1 (char 0) ``` This error could have appeared because you just created the JSON file and there is nothing in it at the moment. -Whilst having the data empty is no problem, the file itself may never be completely empty. +Whilst having empty data is no problem, the file itself may never be completely empty. -You most likely wanted to structure your JSON as a dictionary. To do this, change your JSON to read `{}`. +You most likely wanted to structure your JSON as a dictionary. To do this, edit your empty JSON file so that it instead contains `{}`. Different data types are also supported. If you wish to read more on these, please refer to [this article](https://www.tutorialspoint.com/json/json_data_types.htm). -- cgit v1.2.3 From 82190ee57bd25a1e999b7a8fb323513696e7e042 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Feb 2021 02:25:18 +0000 Subject: Bump aiohttp from 3.7.3 to 3.7.4 Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.7.3 to 3.7.4. 
- [Release notes](https://github.com/aio-libs/aiohttp/releases) - [Changelog](https://github.com/aio-libs/aiohttp/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/aiohttp/compare/v3.7.3...v3.7.4) Signed-off-by: dependabot[bot] --- Pipfile | 2 +- Pipfile.lock | 282 ++++++++++++++++++++++++++++------------------------------- 2 files changed, 133 insertions(+), 151 deletions(-) diff --git a/Pipfile b/Pipfile index efdd46522..0a94fb888 100644 --- a/Pipfile +++ b/Pipfile @@ -6,7 +6,7 @@ name = "pypi" [packages] aio-pika = "~=6.1" aiodns = "~=2.0" -aiohttp = "~=3.5" +aiohttp = "~=3.7" aioping = "~=0.3.1" aioredis = "~=1.3.1" "async-rediscache[fakeredis]" = "~=0.1.2" diff --git a/Pipfile.lock b/Pipfile.lock index 636d07b1a..f8cedb08f 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "26c8089f17d6d6bac11dbed366b1b46818b4546f243af756a106a32af5d9d8f6" + "sha256": "228ae55fe5700ac3827ba6b661933b60b1d06f44fea8bcbe8c5a769fa10ab2fd" }, "pipfile-spec": 6, "requires": { @@ -34,46 +34,46 @@ }, "aiohttp": { "hashes": [ - "sha256:0b795072bb1bf87b8620120a6373a3c61bfcb8da7e5c2377f4bb23ff4f0b62c9", - "sha256:0d438c8ca703b1b714e82ed5b7a4412c82577040dadff479c08405e2a715564f", - "sha256:16a3cb5df5c56f696234ea9e65e227d1ebe9c18aa774d36ff42f532139066a5f", - "sha256:1edfd82a98c5161497bbb111b2b70c0813102ad7e0aa81cbeb34e64c93863005", - "sha256:2406dc1dda01c7f6060ab586e4601f18affb7a6b965c50a8c90ff07569cf782a", - "sha256:2858b2504c8697beb9357be01dc47ef86438cc1cb36ecb6991796d19475faa3e", - "sha256:2a7b7640167ab536c3cb90cfc3977c7094f1c5890d7eeede8b273c175c3910fd", - "sha256:3228b7a51e3ed533f5472f54f70fd0b0a64c48dc1649a0f0e809bec312934d7a", - "sha256:328b552513d4f95b0a2eea4c8573e112866107227661834652a8984766aa7656", - "sha256:39f4b0a6ae22a1c567cb0630c30dd082481f95c13ca528dc501a7766b9c718c0", - "sha256:3b0036c978cbcc4a4512278e98e3e6d9e6b834dc973206162eddf98b586ef1c6", - 
"sha256:3ea8c252d8df5e9166bcf3d9edced2af132f4ead8ac422eac723c5781063709a", - "sha256:41608c0acbe0899c852281978492f9ce2c6fbfaf60aff0cefc54a7c4516b822c", - "sha256:59d11674964b74a81b149d4ceaff2b674b3b0e4d0f10f0be1533e49c4a28408b", - "sha256:5e479df4b2d0f8f02133b7e4430098699450e1b2a826438af6bec9a400530957", - "sha256:684850fb1e3e55c9220aad007f8386d8e3e477c4ec9211ae54d968ecdca8c6f9", - "sha256:6ccc43d68b81c424e46192a778f97da94ee0630337c9bbe5b2ecc9b0c1c59001", - "sha256:6d42debaf55450643146fabe4b6817bb2a55b23698b0434107e892a43117285e", - "sha256:710376bf67d8ff4500a31d0c207b8941ff4fba5de6890a701d71680474fe2a60", - "sha256:756ae7efddd68d4ea7d89c636b703e14a0c686688d42f588b90778a3c2fc0564", - "sha256:77149002d9386fae303a4a162e6bce75cc2161347ad2ba06c2f0182561875d45", - "sha256:78e2f18a82b88cbc37d22365cf8d2b879a492faedb3f2975adb4ed8dfe994d3a", - "sha256:7d9b42127a6c0bdcc25c3dcf252bb3ddc70454fac593b1b6933ae091396deb13", - "sha256:8389d6044ee4e2037dca83e3f6994738550f6ee8cfb746762283fad9b932868f", - "sha256:9c1a81af067e72261c9cbe33ea792893e83bc6aa987bfbd6fdc1e5e7b22777c4", - "sha256:c1e0920909d916d3375c7a1fdb0b1c78e46170e8bb42792312b6eb6676b2f87f", - "sha256:c68fdf21c6f3573ae19c7ee65f9ff185649a060c9a06535e9c3a0ee0bbac9235", - "sha256:c733ef3bdcfe52a1a75564389bad4064352274036e7e234730526d155f04d914", - "sha256:c9c58b0b84055d8bc27b7df5a9d141df4ee6ff59821f922dd73155861282f6a3", - "sha256:d03abec50df423b026a5aa09656bd9d37f1e6a49271f123f31f9b8aed5dc3ea3", - "sha256:d2cfac21e31e841d60dc28c0ec7d4ec47a35c608cb8906435d47ef83ffb22150", - "sha256:dcc119db14757b0c7bce64042158307b9b1c76471e655751a61b57f5a0e4d78e", - "sha256:df3a7b258cc230a65245167a202dd07320a5af05f3d41da1488ba0fa05bc9347", - "sha256:df48a623c58180874d7407b4d9ec06a19b84ed47f60a3884345b1a5099c1818b", - "sha256:e1b95972a0ae3f248a899cdbac92ba2e01d731225f566569311043ce2226f5e7", - "sha256:f326b3c1bbfda5b9308252ee0dcb30b612ee92b0e105d4abec70335fab5b1245", - "sha256:f411cb22115cb15452d099fec0ee636b06cf81bfb40ed9c02d30c8dc2bc2e3d1" 
+ "sha256:119feb2bd551e58d83d1b38bfa4cb921af8ddedec9fad7183132db334c3133e0", + "sha256:16d0683ef8a6d803207f02b899c928223eb219111bd52420ef3d7a8aa76227b6", + "sha256:2eb3efe243e0f4ecbb654b08444ae6ffab37ac0ef8f69d3a2ffb958905379daf", + "sha256:2ffea7904e70350da429568113ae422c88d2234ae776519549513c8f217f58a9", + "sha256:40bd1b101b71a18a528ffce812cc14ff77d4a2a1272dfb8b11b200967489ef3e", + "sha256:418597633b5cd9639e514b1d748f358832c08cd5d9ef0870026535bd5eaefdd0", + "sha256:481d4b96969fbfdcc3ff35eea5305d8565a8300410d3d269ccac69e7256b1329", + "sha256:4c1bdbfdd231a20eee3e56bd0ac1cd88c4ff41b64ab679ed65b75c9c74b6c5c2", + "sha256:5563ad7fde451b1986d42b9bb9140e2599ecf4f8e42241f6da0d3d624b776f40", + "sha256:58c62152c4c8731a3152e7e650b29ace18304d086cb5552d317a54ff2749d32a", + "sha256:5b50e0b9460100fe05d7472264d1975f21ac007b35dcd6fd50279b72925a27f4", + "sha256:5d84ecc73141d0a0d61ece0742bb7ff5751b0657dab8405f899d3ceb104cc7de", + "sha256:5dde6d24bacac480be03f4f864e9a67faac5032e28841b00533cd168ab39cad9", + "sha256:5e91e927003d1ed9283dee9abcb989334fc8e72cf89ebe94dc3e07e3ff0b11e9", + "sha256:62bc216eafac3204877241569209d9ba6226185aa6d561c19159f2e1cbb6abfb", + "sha256:6c8200abc9dc5f27203986100579fc19ccad7a832c07d2bc151ce4ff17190076", + "sha256:6ca56bdfaf825f4439e9e3673775e1032d8b6ea63b8953d3812c71bd6a8b81de", + "sha256:71680321a8a7176a58dfbc230789790639db78dad61a6e120b39f314f43f1907", + "sha256:7c7820099e8b3171e54e7eedc33e9450afe7cd08172632d32128bd527f8cb77d", + "sha256:7dbd087ff2f4046b9b37ba28ed73f15fd0bc9f4fdc8ef6781913da7f808d9536", + "sha256:822bd4fd21abaa7b28d65fc9871ecabaddc42767884a626317ef5b75c20e8a2d", + "sha256:8ec1a38074f68d66ccb467ed9a673a726bb397142c273f90d4ba954666e87d54", + "sha256:950b7ef08b2afdab2488ee2edaff92a03ca500a48f1e1aaa5900e73d6cf992bc", + "sha256:99c5a5bf7135607959441b7d720d96c8e5c46a1f96e9d6d4c9498be8d5f24212", + "sha256:b84ad94868e1e6a5e30d30ec419956042815dfaea1b1df1cef623e4564c374d9", + 
"sha256:bc3d14bf71a3fb94e5acf5bbf67331ab335467129af6416a437bd6024e4f743d", + "sha256:c2a80fd9a8d7e41b4e38ea9fe149deed0d6aaede255c497e66b8213274d6d61b", + "sha256:c44d3c82a933c6cbc21039326767e778eface44fca55c65719921c4b9661a3f7", + "sha256:cc31e906be1cc121ee201adbdf844522ea3349600dd0a40366611ca18cd40e81", + "sha256:d5d102e945ecca93bcd9801a7bb2fa703e37ad188a2f81b1e65e4abe4b51b00c", + "sha256:dd7936f2a6daa861143e376b3a1fb56e9b802f4980923594edd9ca5670974895", + "sha256:dee68ec462ff10c1d836c0ea2642116aba6151c6880b688e56b4c0246770f297", + "sha256:e76e78863a4eaec3aee5722d85d04dcbd9844bc6cd3bfa6aa880ff46ad16bfcb", + "sha256:eab51036cac2da8a50d7ff0ea30be47750547c9aa1aa2cf1a1b710a1827e7dbe", + "sha256:f4496d8d04da2e98cc9133e238ccebf6a13ef39a93da2e87146c8c8ac9768242", + "sha256:fbd3b5e18d34683decc00d9a360179ac1e7a320a5fee10ab8053ffd6deab76e0", + "sha256:feb24ff1226beeb056e247cf2e24bba5232519efb5645121c4aea5b6ad74c1f2" ], "index": "pypi", - "version": "==3.7.3" + "version": "==3.7.4" }, "aioping": { "hashes": [ @@ -96,7 +96,6 @@ "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573", "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e" ], - "markers": "python_version >= '3.6'", "version": "==3.3.1" }, "alabaster": { @@ -123,7 +122,6 @@ "sha256:c25e4fff73f64d20645254783c3224a4c49e083e3fab67c44f17af944c5e26af" ], "index": "pypi", - "markers": "python_version ~= '3.7'", "version": "==0.1.4" }, "async-timeout": { @@ -131,7 +129,6 @@ "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3" ], - "markers": "python_full_version >= '3.5.3'", "version": "==3.0.1" }, "attrs": { @@ -139,7 +136,6 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, 
"babel": { @@ -147,7 +143,6 @@ "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.9.0" }, "beautifulsoup4": { @@ -168,44 +163,45 @@ }, "cffi": { "hashes": [ - "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e", - "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d", - "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a", - "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec", - "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362", - "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668", - "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c", - "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b", - "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06", - "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698", - "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2", - "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c", - "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7", - "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009", - "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03", - "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b", - "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909", - "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53", - "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35", - "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26", - "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b", - 
"sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01", - "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb", - "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293", - "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd", - "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d", - "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3", - "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d", - "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e", - "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca", - "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d", - "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775", - "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375", - "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b", - "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b", - "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f" - ], - "version": "==1.14.4" + "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", + "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", + "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", + "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", + "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", + "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", + "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", + "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", + "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", + "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", + 
"sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", + "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", + "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5", + "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", + "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", + "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", + "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", + "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", + "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", + "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369", + "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827", + "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053", + "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa", + "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4", + "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322", + "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132", + "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62", + "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa", + "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0", + "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396", + "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", + "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", + "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", + "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", + "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", + "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", 
+ "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" + ], + "version": "==1.14.5" }, "chardet": { "hashes": [ @@ -219,6 +215,7 @@ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], + "index": "pypi", "markers": "sys_platform == 'win32'", "version": "==0.4.4" }, @@ -251,7 +248,6 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "emoji": { @@ -334,7 +330,6 @@ "sha256:e64be68255234bb489a574c4f2f8df7029c98c81ec4d160d6cd836e7f0679390", "sha256:e82d6b930e02e80e5109b678c663a9ed210680ded81c1abaf54635d88d1da298" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "humanfriendly": { @@ -342,7 +337,6 @@ "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d", "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==9.1" }, "idna": { @@ -350,7 +344,6 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -358,16 +351,14 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "jinja2": { "hashes": [ - "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", - 
"sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" + "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", + "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==2.11.2" + "version": "==2.11.3" }, "lxml": { "hashes": [ @@ -427,8 +418,12 @@ "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", + "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", + "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", @@ -437,26 +432,40 @@ "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", + "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + 
"sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", + "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", + "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", + "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", + "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - 
"sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", + "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "more-itertools": { @@ -507,23 +516,20 @@ "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281", "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80" ], - "markers": "python_version >= '3.6'", "version": "==5.1.0" }, "ordered-set": { "hashes": [ "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95" ], - "markers": "python_version >= '3.5'", "version": "==4.0.2" }, "packaging": { "hashes": [ - "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", - "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093" + "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", + "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.8" + "version": "==20.9" }, "pamqp": { "hashes": [ @@ -571,23 +577,20 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { "hashes": [ - "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435", - "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337" + "sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0", + "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88" ], - "markers": "python_version >= '3.5'", - "version": "==2.7.4" + "version": "==2.8.0" }, "pyparsing": { "hashes": [ 
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", "version": "==2.4.7" }, "python-dateutil": { @@ -600,10 +603,10 @@ }, "pytz": { "hashes": [ - "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", - "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5" + "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", + "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798" ], - "version": "==2020.5" + "version": "==2021.1" }, "pyyaml": { "hashes": [ @@ -629,7 +632,6 @@ "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2", "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, "requests": { @@ -653,15 +655,14 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==1.15.0" }, "snowballstemmer": { "hashes": [ - "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", - "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52" + "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2", + "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914" ], - "version": "==2.0.0" + "version": "==2.1.0" }, "sortedcontainers": { "hashes": [ @@ -672,11 +673,11 @@ }, "soupsieve": { "hashes": [ - "sha256:4bb21a6ee4707bf43b61230e80740e71bfe56e55d1f1f50924b087bb2975c851", - "sha256:6dc52924dc0bc710a5d16794e6b3480b2c7c08b07729505feab2b2c16661ff6e" + "sha256:407fa1e8eb3458d1b5614df51d9651a1180ea5fedf07feb46e45d7e25e6d6cdd", 
+ "sha256:d3a5ea5b350423f47d07639f74475afedad48cf41c0ad7a82ca13a3928af34f6" ], "markers": "python_version >= '3.0'", - "version": "==2.1" + "version": "==2.2" }, "sphinx": { "hashes": [ @@ -691,7 +692,6 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -699,7 +699,6 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -707,7 +706,6 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -715,7 +713,6 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], - "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -723,7 +720,6 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -731,7 +727,6 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], - "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "statsd": { @@ -752,11 +747,10 @@ }, "urllib3": { "hashes": [ - "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", - "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" + 
"sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", + "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.2" + "version": "==1.26.3" }, "yarl": { "hashes": [ @@ -798,7 +792,6 @@ "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a", "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71" ], - "markers": "python_version >= '3.6'", "version": "==1.6.3" } }, @@ -815,7 +808,6 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "certifi": { @@ -830,7 +822,6 @@ "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d", "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1" ], - "markers": "python_full_version >= '3.6.1'", "version": "==3.2.0" }, "chardet": { @@ -995,18 +986,16 @@ }, "identify": { "hashes": [ - "sha256:18994e850ba50c37bcaed4832be8b354d6a06c8fb31f54e0e7ece76d32f69bc8", - "sha256:892473bf12e655884132a3a32aca737a3cbefaa34a850ff52d501773a45837bc" + "sha256:de7129142a5c86d75a52b96f394d94d96d497881d2aaf8eafe320cdbe8ac4bcc", + "sha256:e0dae57c0397629ce13c289f6ddde0204edf518f557bfdb1e56474aa143e77c3" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.5.12" + "version": "==1.5.14" }, "idna": { "hashes": [ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "mccabe": { @@ -1044,7 +1033,6 @@ 
"sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.6.0" }, "pydocstyle": { @@ -1052,7 +1040,6 @@ "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325", "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678" ], - "markers": "python_version >= '3.5'", "version": "==5.1.1" }, "pyflakes": { @@ -1060,7 +1047,6 @@ "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.2.0" }, "pyyaml": { @@ -1095,39 +1081,35 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==1.15.0" }, "snowballstemmer": { "hashes": [ - "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", - "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52" + "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2", + "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914" ], - "version": "==2.0.0" + "version": "==2.1.0" }, "toml": { "hashes": [ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", "version": "==0.10.2" }, "urllib3": { "hashes": [ - "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", - "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" + 
"sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", + "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.2" + "version": "==1.26.3" }, "virtualenv": { "hashes": [ - "sha256:0c111a2236b191422b37fe8c28b8c828ced39aab4bf5627fa5c331aeffb570d9", - "sha256:14b34341e742bdca219e10708198e704e8a7064dd32f474fc16aca68ac53a306" + "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d", + "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.3.1" + "version": "==20.4.2" } } } -- cgit v1.2.3 From cea82da9547d3178f071241a75d024582d314ff9 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 26 Feb 2021 14:48:10 +0200 Subject: Supressing any exceptions while updating the threshold in redis Updating redis might cause an error, making sure it doesn't stop the command mid-way --- bot/exts/moderation/defcon.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 86dece518..a88892b13 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -2,6 +2,7 @@ import asyncio import logging import traceback from collections import namedtuple +from contextlib import suppress from datetime import datetime from enum import Enum from typing import Optional, Union @@ -208,12 +209,13 @@ class Defcon(Cog): if self.expiry is not None: self.scheduler.schedule_at(expiry, 0, self._remove_threshold()) - await self.defcon_settings.update( - { - 'threshold': Defcon._stringify_relativedelta(self.threshold) if self.threshold else "", - 'expiry': expiry.isoformat() if expiry else 0 - } - ) + with suppress(Exception): + await 
self.defcon_settings.update( + { + 'threshold': Defcon._stringify_relativedelta(self.threshold) if self.threshold else "", + 'expiry': expiry.isoformat() if expiry else 0 + } + ) self._update_notifier() action = Action.DURATION_UPDATE -- cgit v1.2.3 From 80153ed12d20ccaa637a55765df60d8d3b5e64ef Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 26 Feb 2021 15:17:16 +0200 Subject: Changed name of _duration_parser constant to uppercase --- bot/utils/time.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/utils/time.py b/bot/utils/time.py index a7b441327..f862e40f7 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -9,7 +9,7 @@ from dateutil.relativedelta import relativedelta RFC1123_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" INFRACTION_FORMAT = "%Y-%m-%d %H:%M" -_duration_parser = re.compile( +_DURATION_REGEX = re.compile( r"((?P\d+?) ?(years|year|Y|y) ?)?" r"((?P\d+?) ?(months|month|m) ?)?" r"((?P\d+?) ?(weeks|week|W|w) ?)?" @@ -100,7 +100,7 @@ def parse_duration_string(duration: str) -> Optional[relativedelta]: The units need to be provided in descending order of magnitude. If the string does represent a durationdelta object, it will return None. """ - match = _duration_parser.fullmatch(duration) + match = _DURATION_REGEX.fullmatch(duration) if not match: return None -- cgit v1.2.3 From 6dbf8ded81716f2bf55ca4d6297e3154afcdd285 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 26 Feb 2021 19:07:47 +0200 Subject: Sync alert won't trigger with fake redis The alert will trigger with fake redis on every bot startup even when people aren't working on the defcon cog. Added a condition to check if fake redis is being used. 
--- bot/exts/moderation/defcon.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index a88892b13..aa6dc0790 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -14,7 +14,7 @@ from discord.ext import tasks from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot -from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles +from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Redis, Roles from bot.converters import DurationDelta, Expiry from bot.exts.moderation.modlog import ModLog from bot.utils.messages import format_user @@ -89,10 +89,11 @@ class Defcon(Cog): self.expiry = datetime.fromisoformat(settings["expiry"]) if settings["expiry"] else None except Exception: log.exception("Unable to get DEFCON settings!") - await self.channel.send( - f"<@&{Roles.moderators}> <@&{Roles.devops}> **WARNING**: Unable to get DEFCON settings!" - f"\n\n```{traceback.format_exc()}```" - ) + if not Redis.use_fakeredis: + await self.channel.send( + f"<@&{Roles.moderators}> <@&{Roles.devops}> **WARNING**: Unable to get DEFCON settings!" 
+ f"\n\n```{traceback.format_exc()}```" + ) else: if self.expiry: -- cgit v1.2.3 From 64e85ddcc57e2789627c4a4a7869424d7583dc17 Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Fri, 26 Feb 2021 21:02:31 +0000 Subject: !int socketstats improvements - Comma separate event values - Make fields inline for smaller embed --- bot/exts/utils/internal.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py index a7ab43f37..d193e4d4f 100644 --- a/bot/exts/utils/internal.py +++ b/bot/exts/utils/internal.py @@ -245,7 +245,7 @@ async def func(): # (None,) -> Any ) for event_type, count in self.socket_events.most_common(25): - stats_embed.add_field(name=event_type, value=count, inline=False) + stats_embed.add_field(name=event_type, value=f"{count:,}", inline=True) await ctx.send(embed=stats_embed) -- cgit v1.2.3 From de226ea845e8f68735ce6d20193bece9f50b1d5f Mon Sep 17 00:00:00 2001 From: Gustav Odinger <65498475+gustavwilliam@users.noreply.github.com> Date: Fri, 26 Feb 2021 22:24:50 +0100 Subject: Make "event" plural in socketstats embed --- bot/exts/utils/internal.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py index d193e4d4f..6f2da3131 100644 --- a/bot/exts/utils/internal.py +++ b/bot/exts/utils/internal.py @@ -240,7 +240,7 @@ async def func(): # (None,) -> Any stats_embed = discord.Embed( title="WebSocket statistics", - description=f"Receiving {per_s:0.2f} event per second.", + description=f"Receiving {per_s:0.2f} events per second.", color=discord.Color.blurple() ) -- cgit v1.2.3 From 41cfe3f805e53c43ec18585d203e0b80ed59afda Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 27 Feb 2021 02:52:05 +0100 Subject: Get the last index instead of using max The last index will always be the largest one so there's no need for max to search for it --- bot/exts/info/doc/_parsing.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 6b2d31cdd..b422b4f24 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -190,7 +190,7 @@ def _get_truncated_description( truncated_result = textwrap.shorten(result, truncate_index) else: # Truncate at the last Markdown element that comes before the truncation index. - markdown_truncate_index = max(possible_truncation_indices) + markdown_truncate_index = possible_truncation_indices[-1] truncated_result = result[:markdown_truncate_index] return truncated_result.strip(_TRUNCATE_STRIP_CHARACTERS) + "..." -- cgit v1.2.3 From a95cbc501a813b18d4e11bacefef0d447578e6fe Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 27 Feb 2021 11:07:47 +0100 Subject: Add digits to package converter --- bot/converters.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index 2b383636c..be1f1329f 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -134,13 +134,13 @@ class PackageName(Converter): Package names are used for stats and are restricted to the a-z and _ characters.
""" - PACKAGE_NAME_RE = re.compile(r"[^a-z_]") + PACKAGE_NAME_RE = re.compile(r"[^a-z0-9_]") @classmethod async def convert(cls, ctx: Context, argument: str) -> str: """Checks whether the given string is a valid package name.""" if cls.PACKAGE_NAME_RE.search(argument): - raise BadArgument("The provided package name is not valid; please only use the _ and a-z characters.") + raise BadArgument("The provided package name is not valid; please only use the _, 0-9 and a-z characters.") return argument -- cgit v1.2.3 From 02c0d1535b46922096d53967e2938bbb3a56ef82 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 28 Feb 2021 00:40:52 +0100 Subject: Add new symbols to front of queue instead of extending the end Using extend caused old items, including the ones that were requested by users and pushed to the front, to be pushed back by all of the items on the new page, possibly significantly delaying their parsing --- bot/exts/info/doc/_batch_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 9956878cf..780fb16d9 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -118,7 +118,7 @@ class BatchParser: "lxml", ) - self._queue.extend(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) + self._queue[:0] = (QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) log.debug(f"Added items from {doc_item.url} to parse queue.") if self._parse_task is None: -- cgit v1.2.3 From 75f2b9d5e922db8aca2c873c214455fded02fc4d Mon Sep 17 00:00:00 2001 From: swfarnsworth Date: Sun, 28 Feb 2021 11:33:26 -0500 Subject: Update the tests to reflect changes in expected behavior. The DM sent to infracted users now instructs them to DM modmail if they want to discuss non-ban infractions, so the tests now check if that instruction is present. 
Note that there already exists a superfluous test for note infractions, for which no DM is sent by design. --- tests/bot/exts/moderation/infraction/test_utils.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py index ef6127344..ee9ff650c 100644 --- a/tests/bot/exts/moderation/infraction/test_utils.py +++ b/tests/bot/exts/moderation/infraction/test_utils.py @@ -146,7 +146,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.token_removed - ).set_footer(text=utils.INFRACTION_APPEAL_EMAIL_FOOTER), + ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), "send_result": True }, { @@ -164,9 +164,11 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.token_removed - ), + ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), "send_result": False }, + # Note that this test case asserts that the DM that *would* get sent to the user is formatted + # correctly, even though that message is deliberately never sent. 
{ "args": (self.user, "note", None, None, Icons.defcon_denied), "expected_output": Embed( @@ -182,7 +184,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ), + ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), "send_result": False }, { @@ -200,7 +202,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ).set_footer(text=utils.INFRACTION_APPEAL_EMAIL_FOOTER), + ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), "send_result": False }, { @@ -218,7 +220,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ).set_footer(text=utils.INFRACTION_APPEAL_EMAIL_FOOTER), + ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), "send_result": True } ] -- cgit v1.2.3 From e82429c88f8643f8eaa89ea5541d0ffe860ec338 Mon Sep 17 00:00:00 2001 From: SavagePastaMan <69145546+SavagePastaMan@users.noreply.github.com> Date: Sat, 27 Feb 2021 09:54:50 -0500 Subject: Create comparison.md --- bot/resources/tags/comparison.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 bot/resources/tags/comparison.md diff --git a/bot/resources/tags/comparison.md b/bot/resources/tags/comparison.md new file mode 100644 index 000000000..12844bd2f --- /dev/null +++ b/bot/resources/tags/comparison.md @@ -0,0 +1,12 @@ +**Assignment vs. Comparison** + +The assignment operator (`=`) is used to assign variables. +```python +x = 5 +print(x) # Prints 5 +``` +The equality operator (`==`) is used to compare values. 
+```python +if x == 5: + print("The value of x is 5") +``` -- cgit v1.2.3 From c3a9e704080bfc670993b396d721ffd762348591 Mon Sep 17 00:00:00 2001 From: Bast Date: Mon, 1 Mar 2021 02:20:53 -0800 Subject: Add alias !u for !user --- bot/exts/info/information.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 4499e4c25..88e904d03 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -202,7 +202,7 @@ class Information(Cog): await ctx.send(embed=embed) - @command(name="user", aliases=["user_info", "member", "member_info"]) + @command(name="user", aliases=["user_info", "member", "member_info", "u"]) async def user_info(self, ctx: Context, user: FetchedMember = None) -> None: """Returns info about a user.""" if user is None: -- cgit v1.2.3 From 5b31aa992db27cd1798e4dce5f1c4256aa8848fa Mon Sep 17 00:00:00 2001 From: Bast Date: Mon, 1 Mar 2021 02:22:59 -0800 Subject: Add alias !tban for !tempban --- bot/exts/moderation/infraction/infractions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index 7349d65f2..406c6b53f 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -126,7 +126,7 @@ class Infractions(InfractionScheduler, commands.Cog): duration = await Duration().convert(ctx, "1h") await self.apply_mute(ctx, user, reason, expires_at=duration) - @command() + @command(aliases=("tban",)) async def tempban( self, ctx: Context, -- cgit v1.2.3 From 58c37361d0a322b308869492d50f2008ae497b3d Mon Sep 17 00:00:00 2001 From: Bast Date: Mon, 1 Mar 2021 02:23:46 -0800 Subject: Add !superstar and !unsuperstar aliases for !superstarify --- bot/exts/moderation/infraction/superstarify.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/infraction/superstarify.py 
b/bot/exts/moderation/infraction/superstarify.py index ffc470c54..704dddf9c 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -104,7 +104,7 @@ class Superstarify(InfractionScheduler, Cog): await self.reapply_infraction(infraction, action) - @command(name="superstarify", aliases=("force_nick", "star", "starify")) + @command(name="superstarify", aliases=("force_nick", "star", "starify", "superstar")) async def superstarify( self, ctx: Context, @@ -183,7 +183,7 @@ class Superstarify(InfractionScheduler, Cog): ) await ctx.send(embed=embed) - @command(name="unsuperstarify", aliases=("release_nick", "unstar", "unstarify")) + @command(name="unsuperstarify", aliases=("release_nick", "unstar", "unstarify", "unsuperstar")) async def unsuperstarify(self, ctx: Context, member: Member) -> None: """Remove the superstarify infraction and allow the user to change their nickname.""" await self.pardon_infraction(ctx, "superstar", member) -- cgit v1.2.3 From dec9a9dba77aa4322f9dc37b6493a8410e7482ec Mon Sep 17 00:00:00 2001 From: Bast Date: Mon, 1 Mar 2021 02:38:41 -0800 Subject: Add !stban alias for !shadowtempban --- bot/exts/moderation/infraction/infractions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index 406c6b53f..3b5b1df45 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -198,7 +198,7 @@ class Infractions(InfractionScheduler, commands.Cog): # endregion # region: Temporary shadow infractions - @command(hidden=True, aliases=["shadowtempban", "stempban"]) + @command(hidden=True, aliases=["shadowtempban", "stempban", "stban"]) async def shadow_tempban( self, ctx: Context, -- cgit v1.2.3 From 2293b9dc78d21a80043a9e9d24b9442caf7579df Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Mon, 1 Mar 2021 19:46:08 +0200 
Subject: Change to handle specifically redis errors The idea to ignore alerts on fake redis didn't solve the problem completely, because sometimes you'll just develop with a real redis. It also didn't solve the ping we would get on first start up. After looking into it there seems like there's no actual reason to alert on key errors, as they should only happen if the cache gets wiped for some reason, which shouldn't happen, but in which case we have bigger issues. Alerts are therefore limited to connection errors raised by redis. This additionally handles only redis errors when writing to it as well. If any other error is raised it is ok for the function to stop at that point, as all variables have already been set. The only thing which doesn't get executed is the confirmation message and logging, the lack of which is an exception message in itself. --- bot/exts/moderation/defcon.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index aa6dc0790..3d3f0e81e 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -2,11 +2,11 @@ import asyncio import logging import traceback from collections import namedtuple -from contextlib import suppress from datetime import datetime from enum import Enum from typing import Optional, Union +from aioredis import RedisError from async_rediscache import RedisCache from dateutil.relativedelta import relativedelta from discord import Colour, Embed, Member, User @@ -14,7 +14,7 @@ from discord.ext import tasks from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot -from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Redis, Roles +from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles from bot.converters import DurationDelta, Expiry from bot.exts.moderation.modlog import ModLog from bot.utils.messages 
import format_user @@ -87,13 +87,12 @@ class Defcon(Cog): settings = await self.defcon_settings.to_dict() self.threshold = parse_duration_string(settings["threshold"]) if settings["threshold"] else None self.expiry = datetime.fromisoformat(settings["expiry"]) if settings["expiry"] else None - except Exception: + except RedisError: log.exception("Unable to get DEFCON settings!") - if not Redis.use_fakeredis: - await self.channel.send( - f"<@&{Roles.moderators}> <@&{Roles.devops}> **WARNING**: Unable to get DEFCON settings!" - f"\n\n```{traceback.format_exc()}```" - ) + await self.channel.send( + f"<@&{Roles.moderators}> <@&{Roles.devops}> **WARNING**: Unable to get DEFCON settings!" + f"\n\n```{traceback.format_exc()}```" + ) else: if self.expiry: @@ -210,14 +209,19 @@ class Defcon(Cog): if self.expiry is not None: self.scheduler.schedule_at(expiry, 0, self._remove_threshold()) - with suppress(Exception): + self._update_notifier() + + # Make sure to handle the critical part of the update before writing to Redis. + error = "" + try: await self.defcon_settings.update( { 'threshold': Defcon._stringify_relativedelta(self.threshold) if self.threshold else "", 'expiry': expiry.isoformat() if expiry else 0 } ) - self._update_notifier() + except RedisError: + error = ", but failed to write to cache" action = Action.DURATION_UPDATE @@ -234,7 +238,7 @@ class Defcon(Cog): channel_message = "removed" await self.channel.send( - f"{action.value.emoji} DEFCON threshold {channel_message}." + f"{action.value.emoji} DEFCON threshold {channel_message}{error}." 
) await self._send_defcon_log(action, author) self._update_channel_topic() -- cgit v1.2.3 From b9d1de268fdaa67413e1ac4f24057cd6ecc9771d Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Mon, 1 Mar 2021 19:50:54 +0200 Subject: Provide default cache values when syncing --- bot/exts/moderation/defcon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 3d3f0e81e..482ebe13b 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -85,8 +85,8 @@ class Defcon(Cog): try: settings = await self.defcon_settings.to_dict() - self.threshold = parse_duration_string(settings["threshold"]) if settings["threshold"] else None - self.expiry = datetime.fromisoformat(settings["expiry"]) if settings["expiry"] else None + self.threshold = parse_duration_string(settings["threshold"]) if settings.get("threshold") else None + self.expiry = datetime.fromisoformat(settings["expiry"]) if settings.get("expiry") else None except RedisError: log.exception("Unable to get DEFCON settings!") await self.channel.send( -- cgit v1.2.3 From ca3389eb2e2a796c5f757b37e5e2fa6f308c4dbf Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 2 Mar 2021 20:45:20 +0200 Subject: Improved docstring for threshold command. --- bot/exts/moderation/defcon.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 86dece518..02302612f 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -161,7 +161,14 @@ class Defcon(Cog): async def threshold( self, ctx: Context, threshold: Union[DurationDelta, int], expiry: Optional[Expiry] = None ) -> None: - """Set how old an account must be to join the server.""" + """ + Set how old an account must be to join the server. + + The threshold is the minimum required account age. Can accept either a duration string or a number of days. 
+ Set it to 0 to have no threshold. + The expiry allows to automatically remove the threshold after a designated time. If no expiry is specified, + the cog will remind to remove the threshold hourly. + """ if isinstance(threshold, int): threshold = relativedelta(days=threshold) await self._update_threshold(ctx.author, threshold=threshold, expiry=expiry) -- cgit v1.2.3 From 54952d11339ce4e065f061064098e76a780d5644 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Wed, 3 Mar 2021 15:58:50 +0200 Subject: Add disable_header to watchchannel to disable talentpool headers We need to disable this, because new format of nominations don't match with it. --- bot/exts/moderation/watchchannels/_watchchannel.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index f9fc12dc3..0793a66af 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -47,7 +47,9 @@ class WatchChannel(metaclass=CogABCMeta): webhook_id: int, api_endpoint: str, api_default_params: dict, - logger: logging.Logger + logger: logging.Logger, + *, + disable_header: bool = False ) -> None: self.bot = bot @@ -66,6 +68,7 @@ class WatchChannel(metaclass=CogABCMeta): self.channel = None self.webhook = None self.message_history = MessageHistory() + self.disable_header = disable_header self._start = self.bot.loop.create_task(self.start_watchchannel()) @@ -267,6 +270,9 @@ class WatchChannel(metaclass=CogABCMeta): async def send_header(self, msg: Message) -> None: """Sends a header embed with information about the relayed messages to the watch channel.""" + if self.disable_header: + return + user_id = msg.author.id guild = self.bot.get_guild(GuildConfig.id) -- cgit v1.2.3 From 86988b6bde5fcbbdf2445a5a4f2f1df68d5a7754 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> 
Date: Wed, 3 Mar 2021 16:11:09 +0200 Subject: Migrate talentpool to new schema - Add disable_header to watchchannel initialization. We don't have root actor field anymore, so headers give error and there is no point to rewrite this, because this will be removed soon. - Removed duplicates check of nominations of one user. Now as API allows this, multiple actors can nomination one user. - Add special error message if same actor have already nominated user Every actor can only have 1 nomination entry. - Remove previous reason from watch command We don't store reason that way anymore, and we don't want that this message spam whole channel. - Split end reason and reason editing commands. API PATCH request buildup have been changed, so changing both of them in one command don't make sense anymore. - Migrate nomination string generation --- bot/exts/moderation/watchchannels/talentpool.py | 86 +++++++++++++++++-------- 1 file changed, 59 insertions(+), 27 deletions(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index dd3349c3a..1649d4d48 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -28,6 +28,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): api_endpoint='bot/nominations', api_default_params={'active': 'true', 'ordering': '-inserted_at'}, logger=log, + disable_header=True, ) @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) @@ -83,10 +84,6 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await ctx.send(f":x: Failed to update the user cache; can't add {user}") return - if user.id in self.watched_users: - await ctx.send(f":x: {user} is already being watched in the talent pool") - return - # Manual request with `raise_for_status` as False because we want the actual response session = self.bot.api_client.session url = self.bot.api_client._url_for(self.api_endpoint) @@ 
-101,8 +98,12 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): async with session.post(url, **kwargs) as resp: response_data = await resp.json() - if resp.status == 400 and response_data.get('user', False): - await ctx.send(":x: The specified user can't be found in the database tables") + if resp.status == 400: + if response_data.get('user', False): + await ctx.send(":x: The specified user can't be found in the database tables") + elif response_data.get('actor', False): + await ctx.send(":x: You already have nominated this user") + return else: resp.raise_for_status() @@ -120,9 +121,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): ) if history: - total = f"({len(history)} previous nominations in total)" - start_reason = f"Watched: {textwrap.shorten(history[0]['reason'], width=500, placeholder='...')}" - msg += f"\n\nUser's previous watch reasons {total}:```{start_reason}```" + msg += f"\n\n{len(history)} previous nominations in total" await ctx.send(msg) @@ -176,13 +175,39 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @nomination_edit_group.command(name='reason') @has_any_role(*MODERATION_ROLES) - async def edit_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: - """ - Edits the reason/unnominate reason for the nomination with the given `id` depending on the status. 
+ async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: + """Edits the reason of `actor` entry for the nomination with the given `id`.""" + try: + nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") + except ResponseCodeError as e: + if e.response.status == 404: + self.log.trace(f"Nomination API 404: Can't nomination with id {nomination_id}") + await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") + return + else: + raise - If the nomination is active, the reason for nominating the user will be edited; - If the nomination is no longer active, the reason for ending the nomination will be edited instead. - """ + if not nomination["active"]: + await ctx.send(":x: Can't edit reason of ended nomination.") + return + + if not any(entry["actor"] == actor.id for entry in nomination["entries"]): + await ctx.send(f":x: {actor} don't have entry for this nomination.") + return + + self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {reason}") + + await self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination_id}", + json={"actor": actor.id, "reason": reason} + ) + await self.fetch_user_cache() # Update cache + await ctx.send(":white_check_mark: Successfully updates reason of nomination.") + + @nomination_edit_group.command(name='end_reason') + @has_any_role(*MODERATION_ROLES) + async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: + """Edits the unnominate reason for the nomination with the given `id`.""" try: nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") except ResponseCodeError as e: @@ -193,16 +218,18 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): else: raise - field = "reason" if nomination["active"] else "end_reason" + if nomination["active"]: + await ctx.send(":x: Cannot edit end reason of active nomination.") + return - 
self.log.trace(f"Changing {field} for nomination with id {nomination_id} to {reason}") + self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {reason}") await self.bot.api_client.patch( f"{self.api_endpoint}/{nomination_id}", - json={field: reason} + json={"end_reason": reason} ) await self.fetch_user_cache() # Update cache. - await ctx.send(f":white_check_mark: Updated the {field} of the nomination!") + await ctx.send(":white_check_mark: Updated the end reason of the nomination!") @Cog.listener() async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: @@ -237,13 +264,18 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): def _nomination_to_string(self, nomination_object: dict) -> str: """Creates a string representation of a nomination.""" guild = self.bot.get_guild(Guild.id) + entries = [] + for site_entry in nomination_object["entries"]: + actor_id = site_entry["actor"] + actor = guild.get_member(actor_id) - actor_id = nomination_object["actor"] - actor = guild.get_member(actor_id) + reason = site_entry["reason"] or "*None*" + created = time.format_infraction(site_entry["inserted_at"]) + entries.append(f"Actor: {actor or actor_id}\nReason: {reason}\nCreated: {created}") - active = nomination_object["active"] + entries_string = "\n\n".join(entries) - reason = nomination_object["reason"] or "*None*" + active = nomination_object["active"] start_date = time.format_infraction(nomination_object["inserted_at"]) if active: @@ -252,9 +284,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): =============== Status: **Active** Date: {start_date} - Actor: {actor.mention if actor else actor_id} - Reason: {reason} Nomination ID: `{nomination_object["id"]}` + + {entries_string} =============== """ ) @@ -265,8 +297,8 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): =============== Status: Inactive Date: {start_date} - Actor: {actor.mention if actor else actor_id} - Reason: {reason} + + {entries_string} 
End date: {end_date} Unwatch reason: {nomination_object["end_reason"]} -- cgit v1.2.3 From 85b1d7751c3cf46c007a4194d984a1921684456b Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 3 Mar 2021 14:21:24 +0100 Subject: Use common check for early exit This introduces a possibly redundant check for the doc_item being None but results in flatter code with less duplication --- bot/exts/info/doc/_cog.py | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 95a772df3..0c255c449 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -237,18 +237,15 @@ class DocCog(commands.Cog): await self.refresh_event.wait() doc_item = self.doc_symbols.get(symbol_name) + if doc_item is None and " " in symbol_name: + # If an invalid symbol contains a space, check if the command was invoked + # in the format !d + symbol_name = symbol_name.split(" ", maxsplit=1)[0] + doc_item = self.doc_symbols.get(symbol_name) + if doc_item is None: - if symbol_name.count(" "): - # If an invalid symbol contains a space, check if the command was invoked - # in the format !d - symbol_name = symbol_name.split(" ", maxsplit=1)[0] - doc_item = self.doc_symbols.get(symbol_name) - if doc_item is None: - log.debug("Symbol does not exist.") - return None - else: - log.debug("Symbol does not exist.") - return None + log.debug("Symbol does not exist.") + return None self.bot.stats.incr(f"doc_fetches.{doc_item.package}") -- cgit v1.2.3 From d9d637930e486716f5143bcb1b64bc309e3c55eb Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 4 Mar 2021 19:07:13 +0100 Subject: Use deque instead of a list As we extend the left side of a list now, using a deque that provides a direct method for it is more fitting. fixup! 
Use deque instead of a list --- bot/exts/info/doc/_batch_parser.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 780fb16d9..95538f364 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -1,12 +1,13 @@ from __future__ import annotations import asyncio +import collections import logging import time from collections import defaultdict from contextlib import suppress from operator import attrgetter -from typing import Dict, List, NamedTuple, Union +from typing import Deque, Dict, List, NamedTuple, Union import discord from bs4 import BeautifulSoup @@ -88,7 +89,7 @@ class BatchParser: """ def __init__(self): - self._queue: List[QueueItem] = [] + self._queue: Deque[QueueItem] = collections.deque() self._page_doc_items: Dict[str, List[_cog.DocItem]] = defaultdict(list) self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = {} self._parse_task = None @@ -118,7 +119,7 @@ class BatchParser: "lxml", ) - self._queue[:0] = (QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) + self._queue.extendleft(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) log.debug(f"Added items from {doc_item.url} to parse queue.") if self._parse_task is None: @@ -126,7 +127,7 @@ class BatchParser: else: self._item_futures[doc_item].user_requested = True with suppress(ValueError): - # If the item is not in the list then the item is already parsed or is being parsed + # If the item is not in the queue then the item is already parsed or is being parsed self._move_to_front(doc_item) return await self._item_futures[doc_item] @@ -166,7 +167,8 @@ class BatchParser: # The parse queue stores soups along with the doc symbols in QueueItem objects, # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. 
item_index = self._queue.index(item) - queue_item = self._queue.pop(item_index) + queue_item = self._queue[item_index] + del self._queue[item_index] self._queue.append(queue_item) log.trace(f"Moved {item} to the front of the queue.") -- cgit v1.2.3 From bd8323501712ed0fc313c502f3e0bd567c111328 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 4 Mar 2021 21:42:28 +0100 Subject: Move the seconds of a week timedelta into a constant --- bot/exts/info/doc/_redis_cache.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index cab51c3f1..7de2f3806 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -7,6 +7,8 @@ from async_rediscache.types.base import RedisObject, namespace_lock if TYPE_CHECKING: from ._cog import DocItem +WEEK_SECONDS = datetime.timedelta(weeks=1).total_seconds() + class DocRedisCache(RedisObject): """Interface for redis functionality needed by the Doc cog.""" @@ -33,7 +35,7 @@ class DocRedisCache(RedisObject): await connection.hset(redis_key, item.symbol_id, value) if needs_expire: - await connection.expire(redis_key, datetime.timedelta(weeks=1).total_seconds()) + await connection.expire(redis_key, WEEK_SECONDS) @namespace_lock async def get(self, item: DocItem) -> Optional[str]: -- cgit v1.2.3 From f993a11c0461e57e853dfc0e296fc32dcfc2b265 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 4 Mar 2021 21:49:38 +0100 Subject: Typo and docstring style changes Co-authored-by: MarkKoz fixup! 
Docstring typos and style changes --- bot/converters.py | 2 +- bot/exts/info/doc/_batch_parser.py | 6 +++--- bot/exts/info/doc/_cog.py | 4 ++-- bot/exts/info/doc/_html.py | 8 ++++---- bot/utils/lock.py | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/bot/converters.py b/bot/converters.py index be1f1329f..4fbf3c124 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -140,7 +140,7 @@ class PackageName(Converter): async def convert(cls, ctx: Context, argument: str) -> str: """Checks whether the given string is a valid package name.""" if cls.PACKAGE_NAME_RE.search(argument): - raise BadArgument("The provided package name is not valid; please only use the _, 0-9 and a-z characters.") + raise BadArgument("The provided package name is not valid; please only use the _, 0-9, and a-z characters.") return argument diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 95538f364..45ca17e5e 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -46,7 +46,7 @@ class StaleInventoryNotifier: class QueueItem(NamedTuple): - """Contains a doc_item and the BeautifulSoup object needed to parse it.""" + """Contains a `DocItem` and the `BeautifulSoup` object needed to parse it.""" doc_item: _cog.DocItem soup: BeautifulSoup @@ -120,7 +120,7 @@ class BatchParser: ) self._queue.extendleft(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url]) - log.debug(f"Added items from {doc_item.url} to parse queue.") + log.debug(f"Added items from {doc_item.url} to the parse queue.") if self._parse_task is None: self._parse_task = asyncio.create_task(self._parse_queue()) @@ -181,7 +181,7 @@ class BatchParser: """ Clear all internal symbol data. - All currently requested items are waited to be parsed before clearing. + Wait for all user-requested symbols to be parsed before clearing the parser. 
""" for future in filter(attrgetter("user_requested"), self._item_futures.values()): await future diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 0c255c449..8300f11d1 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -83,10 +83,10 @@ class DocCog(commands.Cog): Build the inventory for a single package. Where: - * `package_name` is the package name to use, appears in the log + * `package_name` is the package name to use in logs and when qualifying symbols * `base_url` is the root documentation URL for the specified package, used to build absolute paths that link to specific symbols - * `package` are the InventoryDict contents of a intersphinx inventory. + * `package` is the content of a intersphinx inventory. """ self.base_urls[api_package_name] = base_url diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py index 701684b88..334b82e98 100644 --- a/bot/exts/info/doc/_html.py +++ b/bot/exts/info/doc/_html.py @@ -54,7 +54,7 @@ def _find_elements_until_tag( limit: int = None, ) -> List[Union[Tag, NavigableString]]: """ - Get all elements up to `limit` or until a tag matching `tag_filter` is found. + Get all elements up to `limit` or until a tag matching `end_tag_filter` is found. `end_tag_filter` can be either a container of string names to check against, or a filtering callable that's applied to tags. 
@@ -86,7 +86,7 @@ _find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=Beaut def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]: - """Create callable that returns True when the passed in tag's class is in `class_names` or when it's is a table.""" + """Create callable that returns True when the passed in tag's class is in `class_names` or when it's a table.""" def match_tag(tag: Tag) -> bool: for attr in class_names: if attr in tag.get("class", ()): @@ -100,8 +100,8 @@ def get_general_description(start_element: Tag) -> List[Union[Tag, NavigableStri """ Get page content to a table or a tag with its class in `SEARCH_END_TAG_ATTRS`. - A headerlink tag is attempted to be found to skip repeating the symbol information in the description, - if it's found it's used as the tag to start the search from instead of the `start_element`. + A headerlink tag is attempted to be found to skip repeating the symbol information in the description. + If it's found it's used as the tag to start the search from instead of the `start_element`. """ child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100) header = next(filter(_class_filter_factory(["headerlink"]), child_tags), None) diff --git a/bot/utils/lock.py b/bot/utils/lock.py index b4c93f063..ec6f92cd4 100644 --- a/bot/utils/lock.py +++ b/bot/utils/lock.py @@ -23,7 +23,7 @@ class SharedEvent: """ Context manager managing an internal event exposed through the wait coro. - While any code is executing in this context manager, the underyling event will not be set; + While any code is executing in this context manager, the underlying event will not be set; when all of the holders finish the event will be set. 
""" -- cgit v1.2.3 From 8c2aa1de81fc55c9e33312a086b98faf0a8cab47 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 00:08:11 +0100 Subject: Do not set redis results in get_symbol_embed The redis results are already being set in the BatchParser for all symbols --- bot/exts/info/doc/_cog.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 8300f11d1..5f9366228 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -255,9 +255,7 @@ class DocCog(commands.Cog): if markdown is None: log.debug(f"Redis cache miss for symbol `{symbol_name}`.") markdown = await self.item_fetcher.get_markdown(doc_item) - if markdown is not None: - await doc_cache.set(doc_item, markdown) - else: + if markdown is None: markdown = "Unable to parse the requested symbol." embed = discord.Embed( -- cgit v1.2.3 From ed750b03efa792205b1e624e49dd318cda9d1312 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 00:28:23 +0100 Subject: Set the result of the future instead of an exception when avaialble --- bot/exts/info/doc/_batch_parser.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 45ca17e5e..f5e16a60b 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -141,6 +141,7 @@ class BatchParser: try: while self._queue: item, soup = self._queue.pop() + markdown = None try: if (future := self._item_futures[item]).done(): # Some items are present in the inventories multiple times under different symbol names, @@ -154,7 +155,10 @@ class BatchParser: scheduling.create_task(self.stale_inventory_notifier.send_warning(item)) except Exception as e: log.exception(f"Unexpected error when handling {item}") - future.set_exception(e) + if markdown is not None: + 
future.set_result(markdown) + else: + future.set_exception(e) else: future.set_result(markdown) await asyncio.sleep(0.1) -- cgit v1.2.3 From fdafa7423596d8a11b5c25a7f6a9ab47ed3ce6b6 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:08:43 +0100 Subject: Do _item_futures cleanup in _parse_queue instead of a concurrent task The doc_cache coro was changed to be awaited directly instead of creating a task to ensure the cache is populated before the item is deleted --- bot/exts/info/doc/_batch_parser.py | 36 ++++-------------------------------- bot/exts/info/doc/_cog.py | 1 - 2 files changed, 4 insertions(+), 33 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index f5e16a60b..d80b62d88 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio import collections import logging -import time from collections import defaultdict from contextlib import suppress from operator import attrgetter @@ -63,20 +62,11 @@ class ParseResultFuture(asyncio.Future): `user_requested` is set by the parser when a Future is requested by an user and moved to the front, allowing the futures to only be waited for when clearing if they were user requested. 
- - `result_set_time` provides the time at which the future's result has been set, - or -inf if the result hasn't been set yet """ def __init__(self): super().__init__() self.user_requested = False - self.result_set_time = float("inf") - - def set_result(self, result: str, /) -> None: - """Set `self.result_set_time` to current time when the result is set.""" - self.result_set_time = time.time() - super().set_result(result) class BatchParser: @@ -91,11 +81,9 @@ class BatchParser: def __init__(self): self._queue: Deque[QueueItem] = collections.deque() self._page_doc_items: Dict[str, List[_cog.DocItem]] = defaultdict(list) - self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = {} + self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = defaultdict(ParseResultFuture) self._parse_task = None - self.cleanup_futures_task = bot.instance.loop.create_task(self._clean_up_futures()) - self.stale_inventory_notifier = StaleInventoryNotifier() async def get_markdown(self, doc_item: _cog.DocItem) -> str: @@ -107,8 +95,7 @@ class BatchParser: Not safe to run while `self.clear` is running. 
""" - if doc_item not in self._item_futures: - self._item_futures.update((item, ParseResultFuture()) for item in self._page_doc_items[doc_item.url]) + if doc_item not in self._item_futures and doc_item not in self._queue: self._item_futures[doc_item].user_requested = True async with bot.instance.http_session.get(doc_item.url) as response: @@ -150,7 +137,7 @@ class BatchParser: markdown = await bot.instance.loop.run_in_executor(None, get_symbol_markdown, soup, item) if markdown is not None: - scheduling.create_task(doc_cache.set(item, markdown)) + await doc_cache.set(item, markdown) else: scheduling.create_task(self.stale_inventory_notifier.send_warning(item)) except Exception as e: @@ -161,6 +148,7 @@ class BatchParser: future.set_exception(e) else: future.set_result(markdown) + del self._item_futures[item] await asyncio.sleep(0.1) finally: self._parse_task = None @@ -194,19 +182,3 @@ class BatchParser: self._queue.clear() self._page_doc_items.clear() self._item_futures.clear() - - async def _clean_up_futures(self) -> None: - """ - Clear old futures from internal results. - - After a future is set, we only need to wait for old requests to its associated `DocItem` to finish - as all new requests will get the value from the redis cache in the cog first. - Keeping them around for longer than a second is unnecessary and keeps the parsed Markdown strings alive. 
- """ - while True: - if not self._queue: - current_time = time.time() - for key, future in self._item_futures.copy().items(): - if current_time - future.result_set_time > 5: - del self._item_futures[key] - await asyncio.sleep(5) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 5f9366228..80f85d625 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -413,6 +413,5 @@ class DocCog(commands.Cog): def cog_unload(self) -> None: """Clear scheduled inventories, queued symbols and cleanup task on cog unload.""" self.inventory_scheduler.cancel_all() - self.item_fetcher.cleanup_futures_task.cancel() self.init_refresh_task.cancel() asyncio.create_task(self.item_fetcher.clear()) -- cgit v1.2.3 From 0b8dab1840ba4f14b41f18a88d0fd870dfeec7fe Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:19:16 +0100 Subject: Add comments for purpose of DocItem attributes --- bot/exts/info/doc/_cog.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 80f85d625..fd8ed2008 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -42,11 +42,11 @@ COMMAND_LOCK_SINGLETON = "inventory refresh" class DocItem(NamedTuple): """Holds inventory symbol information.""" - package: str - group: str - base_url: str - relative_url_path: str - symbol_id: str + package: str # Name of the package name the symbol is from + group: str # Interpshinx "role" of the symbol, for example `label` or `method` + base_url: str # Absolute path to to which the relative path resolves, same for all items with the same package + relative_url_path: str # Relative path to the page where the symbol is located + symbol_id: str # Fragment id used to locate the symbol on the page @property def url(self) -> str: -- cgit v1.2.3 From 8e556bd52b62881de594b6d73365aa0b0498c766 Mon Sep 17 00:00:00 2001 From: Numerlor 
<25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:23:44 +0100 Subject: Use clearer branching Co-authored-by: MarkKoz --- bot/exts/info/doc/_parsing.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index b422b4f24..7549efeac 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -211,12 +211,10 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] ) description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) if signatures is not None: - formatted_markdown = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures)) + signature = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures)) + return f"{signature}\n{description}" else: - formatted_markdown = "" - formatted_markdown += f"\n{description}" - - return formatted_markdown + return description def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]: -- cgit v1.2.3 From 398bbdd2080934ef643d5fc98db6358f28fec051 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:26:09 +0100 Subject: Remove placeholder in shorten call --- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 7549efeac..b1b09ccc7 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -187,7 +187,7 @@ def _get_truncated_description( possible_truncation_indices = [cut for cut in markdown_element_ends if cut < truncate_index] if not possible_truncation_indices: # In case there is no Markdown element ending before the truncation index, use shorten as a fallback. 
- truncated_result = textwrap.shorten(result, truncate_index) + truncated_result = textwrap.shorten(result, truncate_index, placeholder="") else: # Truncate at the last Markdown element that comes before the truncation index. markdown_truncate_index = possible_truncation_indices[-1] -- cgit v1.2.3 From c2c0dc2a8caced134422c005d010e7dd10cf7466 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:32:06 +0100 Subject: Account for ellipses when determining the truncation description index --- bot/exts/info/doc/_parsing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index b1b09ccc7..43e78ddca 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -170,14 +170,14 @@ def _get_truncated_description( if not markdown_element_ends: return "" - # Determine the "hard" truncation index. + # Determine the "hard" truncation index. Account for the ellipsis placeholder for the max length. newline_truncate_index = find_nth_occurrence(result, "\n", max_lines) - if newline_truncate_index is not None and newline_truncate_index < _MAX_DESCRIPTION_LENGTH: + if newline_truncate_index is not None and newline_truncate_index < _MAX_DESCRIPTION_LENGTH - 3: # Truncate based on maximum lines if there are more than the maximum number of lines. truncate_index = newline_truncate_index else: # There are less than the maximum number of lines; truncate based on the max char length. - truncate_index = _MAX_DESCRIPTION_LENGTH + truncate_index = _MAX_DESCRIPTION_LENGTH - 3 # Nothing needs to be truncated if the last element ends before the truncation index. 
if truncate_index >= markdown_element_ends[-1]: -- cgit v1.2.3 From 9c28041dcfb33b273823ef6d5fec3abbe3f1a4c8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:37:26 +0100 Subject: Add comments to the parsing module Co-authored-by: MarkKoz --- bot/exts/info/doc/_parsing.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 43e78ddca..e7b8b695b 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -49,7 +49,7 @@ _BRACKET_PAIRS = { def _is_closing_quote(search_string: str, index: int) -> bool: """Check whether the quote at `index` inside `search_string` can be a closing quote.""" if search_string[index - 1] != "\\": - return True + return True # The quote is not escaped. elif search_string[index - 2] == "\\": return True return False @@ -69,7 +69,7 @@ def _split_parameters(parameters_string: str) -> Iterator[str]: for index, character in enumerated_string: if character in {"'", '"'}: # Skip everything inside of strings, regardless of the depth. - quote_character = character + quote_character = character # The closing quote must equal the opening quote. for index, character in enumerated_string: if character == quote_character and _is_closing_quote(parameters_string, index): break @@ -103,6 +103,7 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec A maximum of `_MAX_SIGNATURE_AMOUNT` signatures is assumed to be passed. """ if sum(len(signature) for signature in signatures) <= _MAX_SIGNATURES_LENGTH: + # Total length of signatures is under the length limit; no truncation needed. 
return signatures max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (MAX_SIGNATURE_AMOUNT + 1 - len(signatures)) @@ -111,6 +112,7 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec signature = signature.strip() if len(signature) > max_signature_length: if (parameters_match := _PARAMETERS_RE.search(signature)) is None: + # The signature has no parameters or the regex failed; perform a simple truncation of the text. formatted_signatures.append(textwrap.shorten(signature, max_signature_length)) continue @@ -118,14 +120,17 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec parameters_string = parameters_match[1] running_length = len(signature) - len(parameters_string) for parameter in _split_parameters(parameters_string): + # Check if including this parameter would still be within the maximum length. if (len(parameter) + running_length) <= max_signature_length - 5: # account for comma and placeholder truncated_signature.append(parameter) running_length += len(parameter) + 1 else: + # There's no more room for this parameter. Truncate the parameter list and put it in the signature. truncated_signature.append(" ...") formatted_signatures.append(signature.replace(parameters_string, ",".join(truncated_signature))) break else: + # The current signature is under the length limit; no truncation needed. formatted_signatures.append(signature) return formatted_signatures @@ -144,7 +149,7 @@ def _get_truncated_description( with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits. """ result = "" - markdown_element_ends = [] + markdown_element_ends = [] # Stores indices into `result` which point to the end boundary of each Markdown element. 
rendered_length = 0 tag_end_index = 0 -- cgit v1.2.3 From 4c423a8d97035e9b7f67413f63b0241b027cd1fc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:37:58 +0100 Subject: Use placeholder consistent with others in the cog --- bot/exts/info/doc/_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index e7b8b695b..fc38ff82a 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -113,7 +113,7 @@ def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collec if len(signature) > max_signature_length: if (parameters_match := _PARAMETERS_RE.search(signature)) is None: # The signature has no parameters or the regex failed; perform a simple truncation of the text. - formatted_signatures.append(textwrap.shorten(signature, max_signature_length)) + formatted_signatures.append(textwrap.shorten(signature, max_signature_length, placeholder="...")) continue truncated_signature = [] -- cgit v1.2.3 From 33d6df2eae9d235db3405966e3f55db970582632 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:47:58 +0100 Subject: Explain use of various containers in the cog Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index fd8ed2008..cedd31f55 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -58,10 +58,13 @@ class DocCog(commands.Cog): """A set of commands for querying & displaying documentation.""" def __init__(self, bot: Bot): + # Contains URLs to documentation home pages. + # Used to calculate inventory diffs on refreshes and to display all currently stored inventories. 
self.base_urls = {} self.bot = bot - self.doc_symbols: Dict[str, DocItem] = {} + self.doc_symbols: Dict[str, DocItem] = {} # Maps symbol names to objects containing their metadata. self.item_fetcher = _batch_parser.BatchParser() + # Maps a conflicting symbol name to a list of the new, disambiguated names created from conflicts with the name. self.renamed_symbols = defaultdict(list) self.inventory_scheduler = Scheduler(self.__class__.__name__) -- cgit v1.2.3 From e811070c0909f596e1767ee955b302b5f60a16d8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 01:48:29 +0100 Subject: Rename params to clearer and more concise alternatives --- bot/exts/info/doc/_cog.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index cedd31f55..8dcc1eff3 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -81,7 +81,7 @@ class DocCog(commands.Cog): await self.bot.wait_until_guild_available() await self.refresh_inventories() - def update_single(self, api_package_name: str, base_url: str, package: InventoryDict) -> None: + def update_single(self, package_name: str, base_url: str, inventory: InventoryDict) -> None: """ Build the inventory for a single package. @@ -91,16 +91,16 @@ class DocCog(commands.Cog): absolute paths that link to specific symbols * `package` is the content of a intersphinx inventory. """ - self.base_urls[api_package_name] = base_url + self.base_urls[package_name] = base_url - for group, items in package.items(): + for group, items in inventory.items(): for symbol_name, relative_doc_url in items: # e.g. 
get 'class' from 'py:class' group_name = group.split(":")[1] if (original_item := self.doc_symbols.get(symbol_name)) is not None: replaced_symbol_name = self.ensure_unique_symbol_name( - api_package_name, + package_name, group_name, original_item, symbol_name, @@ -111,7 +111,7 @@ class DocCog(commands.Cog): relative_url_path, _, symbol_id = relative_doc_url.partition("#") # Intern fields that have shared content so we're not storing unique strings for every object doc_item = DocItem( - api_package_name, + package_name, sys.intern(group_name), base_url, sys.intern(relative_url_path), @@ -120,7 +120,7 @@ class DocCog(commands.Cog): self.doc_symbols[symbol_name] = doc_item self.item_fetcher.add_item(doc_item) - log.trace(f"Fetched inventory for {api_package_name}.") + log.trace(f"Fetched inventory for {package_name}.") async def update_or_reschedule_inventory( self, -- cgit v1.2.3 From 3d4df68eb875a0e7042be387bb50561b917d1e40 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:09:14 +0100 Subject: Move future assignment and check outside of the try No exceptions can be raised from the two lines of code because of the data structures used, moving it out makes for flatter code. --- bot/exts/info/doc/_batch_parser.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index d80b62d88..a626008d2 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -129,12 +129,13 @@ class BatchParser: while self._queue: item, soup = self._queue.pop() markdown = None - try: - if (future := self._item_futures[item]).done(): - # Some items are present in the inventories multiple times under different symbol names, - # if we already parsed an equal item, we can just skip it. 
- continue + if (future := self._item_futures[item]).done(): + # Some items are present in the inventories multiple times under different symbol names, + # if we already parsed an equal item, we can just skip it. + continue + + try: markdown = await bot.instance.loop.run_in_executor(None, get_symbol_markdown, soup, item) if markdown is not None: await doc_cache.set(item, markdown) -- cgit v1.2.3 From 218455259e9d520bcf3b48c3d8d57b1924f31cc9 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:22:17 +0100 Subject: Correct typehint --- bot/exts/info/doc/_batch_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index a626008d2..d88b32208 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -6,7 +6,7 @@ import logging from collections import defaultdict from contextlib import suppress from operator import attrgetter -from typing import Deque, Dict, List, NamedTuple, Union +from typing import Deque, Dict, List, NamedTuple, Optional, Union import discord from bs4 import BeautifulSoup @@ -86,7 +86,7 @@ class BatchParser: self.stale_inventory_notifier = StaleInventoryNotifier() - async def get_markdown(self, doc_item: _cog.DocItem) -> str: + async def get_markdown(self, doc_item: _cog.DocItem) -> Optional[str]: """ Get the result Markdown of `doc_item`. -- cgit v1.2.3 From 64e5ba42675f0940995d75d2a3340791acd260c2 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:27:35 +0100 Subject: Set future result to None on exceptions We can still provide th user with at least the link to the docs, for which we already have handling in the cog with a generic "unable to parse message", using exceptions for that would mean setting it here, immediately catching it and then providing the same or very similar message. 
--- bot/exts/info/doc/_batch_parser.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index d88b32208..a809fed78 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -141,14 +141,9 @@ class BatchParser: await doc_cache.set(item, markdown) else: scheduling.create_task(self.stale_inventory_notifier.send_warning(item)) - except Exception as e: + except Exception: log.exception(f"Unexpected error when handling {item}") - if markdown is not None: - future.set_result(markdown) - else: - future.set_exception(e) - else: - future.set_result(markdown) + future.set_result(markdown) del self._item_futures[item] await asyncio.sleep(0.1) finally: -- cgit v1.2.3 From c3a516ce6d69e774c3a0d441b0ca2b4a1af774be Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:31:32 +0100 Subject: Add comment explaining purpose of create_task over await --- bot/exts/info/doc/_batch_parser.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index a809fed78..da0984a91 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -140,6 +140,7 @@ class BatchParser: if markdown is not None: await doc_cache.set(item, markdown) else: + # Don't wait for this coro as the parsing doesn't depend on anything it does. 
scheduling.create_task(self.stale_inventory_notifier.send_warning(item)) except Exception: log.exception(f"Unexpected error when handling {item}") -- cgit v1.2.3 From af3c1140c99058e6681f26e8f72b973935df7ad8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:32:07 +0100 Subject: Use scheduling's create_task --- bot/exts/info/doc/_batch_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index da0984a91..f56f4e283 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -110,7 +110,7 @@ class BatchParser: log.debug(f"Added items from {doc_item.url} to the parse queue.") if self._parse_task is None: - self._parse_task = asyncio.create_task(self._parse_queue()) + self._parse_task = scheduling.create_task(self._parse_queue()) else: self._item_futures[doc_item].user_requested = True with suppress(ValueError): -- cgit v1.2.3 From 7f1f47104eaa7ad7ca38ecad846f32b6567060d0 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:36:40 +0100 Subject: Name tasks --- bot/exts/info/doc/_batch_parser.py | 11 ++++++++--- bot/exts/info/doc/_cog.py | 7 +++++-- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index f56f4e283..369bb462c 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -24,7 +24,10 @@ class StaleInventoryNotifier: """Handle sending notifications about stale inventories through `DocItem`s to dev log.""" def __init__(self): - self._init_task = bot.instance.loop.create_task(self._init_channel()) + self._init_task = bot.instance.loop.create_task( + self._init_channel(), + name="StaleInventoryNotifier channel init" + ) self._warned_urls = set() async def _init_channel(self) -> None: @@ -110,7 +113,7 @@ class 
BatchParser: log.debug(f"Added items from {doc_item.url} to the parse queue.") if self._parse_task is None: - self._parse_task = scheduling.create_task(self._parse_queue()) + self._parse_task = scheduling.create_task(self._parse_queue(), name="Queue parse") else: self._item_futures[doc_item].user_requested = True with suppress(ValueError): @@ -141,7 +144,9 @@ class BatchParser: await doc_cache.set(item, markdown) else: # Don't wait for this coro as the parsing doesn't depend on anything it does. - scheduling.create_task(self.stale_inventory_notifier.send_warning(item)) + scheduling.create_task( + self.stale_inventory_notifier.send_warning(item), name="Stale inventory warning" + ) except Exception: log.exception(f"Unexpected error when handling {item}") future.set_result(markdown) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 8dcc1eff3..60f6d8eea 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -73,7 +73,10 @@ class DocCog(commands.Cog): self.refresh_event.set() self.symbol_get_event = SharedEvent() - self.init_refresh_task = self.bot.loop.create_task(self.init_refresh_inventory()) + self.init_refresh_task = self.bot.loop.create_task( + self.init_refresh_inventory(), + name="Doc inventory init" + ) @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) async def init_refresh_inventory(self) -> None: @@ -417,4 +420,4 @@ class DocCog(commands.Cog): """Clear scheduled inventories, queued symbols and cleanup task on cog unload.""" self.inventory_scheduler.cancel_all() self.init_refresh_task.cancel() - asyncio.create_task(self.item_fetcher.clear()) + asyncio.create_task(self.item_fetcher.clear(), name="DocCog.item_fetcher unload clear") -- cgit v1.2.3 From 150cb3371040e0fefbe24702ca80ce2808014f6f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:39:14 +0100 Subject: Rename markup_hint to Markup --- bot/exts/info/doc/_html.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py index 334b82e98..94efd81b7 100644 --- a/bot/exts/info/doc/_html.py +++ b/bot/exts/info/doc/_html.py @@ -33,9 +33,9 @@ class Strainer(SoupStrainer): log.warning("`text` is not a supported kwarg in the custom strainer.") super().__init__(**kwargs) - markup_hint = Union[PageElement, List["markup_hint"]] + Markup = Union[PageElement, List["Markup"]] - def search(self, markup: markup_hint) -> Union[PageElement, str]: + def search(self, markup: Markup) -> Union[PageElement, str]: """Extend default SoupStrainer behaviour to allow matching both `Tag`s` and `NavigableString`s.""" if isinstance(markup, str): # Let everything through the text filter if we're including strings and tags. -- cgit v1.2.3 From f7b56c533df7bf8c520f5cf69df5bf6dd62cf2dc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 02:44:25 +0100 Subject: Clarify the use of _set_expires and needs_expire --- bot/exts/info/doc/_redis_cache.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index 7de2f3806..ad764816f 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -30,6 +30,9 @@ class DocRedisCache(RedisObject): with await self._get_pool_connection() as connection: if redis_key not in self._set_expires: + # An expire is only set if the key didn't exist before. + # If this is the first time setting values for this key check if it exists and add it to + # `_set_expires` to prevent redundant checks for subsequent uses with items from the same page. 
self._set_expires.add(redis_key) needs_expire = not await connection.exists(redis_key) -- cgit v1.2.3 From 74cbe44625a1e6e2e39f77b2663794d3ab5aaf58 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 5 Mar 2021 18:22:46 +0100 Subject: Correct tests cases The tests were not adjusted after the converter was corrected to accept digits --- tests/bot/test_converters.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py index 231798a92..4af84dde5 100644 --- a/tests/bot/test_converters.py +++ b/tests/bot/test_converters.py @@ -80,7 +80,7 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase): async def test_package_name_for_valid(self): """PackageName returns valid package names unchanged.""" - test_values = ('foo', 'le_mon') + test_values = ('foo', 'le_mon', 'num83r') for name in test_values: with self.subTest(identifier=name): @@ -89,7 +89,7 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase): async def test_package_name_for_invalid(self): """PackageName raises the proper exception for invalid package names.""" - test_values = ('text_with_a_dot.', 'UpperCaseName', "num83r") + test_values = ('text_with_a_dot.', 'UpperCaseName', 'dashed-name') for name in test_values: with self.subTest(identifier=name): -- cgit v1.2.3 From 4f5f284d3eec46b9209d19142d5c21456c4c403a Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 6 Mar 2021 03:46:25 +0100 Subject: Abstract logic from create_symbol_embed into additional methods The method was also renamed from get_symbol_embed to create_symbol_embed --- bot/exts/info/doc/_cog.py | 54 +++++++++++++++++++++++++++++++---------------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 60f6d8eea..64e204fad 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -229,19 +229,13 @@ 
class DocCog(commands.Cog): log.debug("Finished inventory refresh.") self.refresh_event.set() - async def get_symbol_embed(self, symbol_name: str) -> Optional[discord.Embed]: + def get_symbol_item(self, symbol_name: str) -> Optional[DocItem]: """ - Attempt to scrape and fetch the data for the given `symbol_name`, and build an embed from its contents. - - If the symbol is known, an Embed with documentation about it is returned. + Get the `DocItem` associated with `symbol_name` from the `doc_symbols` dict. - First check the DocRedisCache before querying the cog's `BatchParser`. + If the doc item is not found directly from the name and the name contains a space, + the first word of the name will be attempted to be used to get the item. """ - log.trace(f"Building embed for symbol `{symbol_name}`") - if not self.refresh_event.is_set(): - log.debug("Waiting for inventories to be refreshed before processing item.") - await self.refresh_event.wait() - doc_item = self.doc_symbols.get(symbol_name) if doc_item is None and " " in symbol_name: # If an invalid symbol contains a space, check if the command was invoked @@ -249,25 +243,49 @@ class DocCog(commands.Cog): symbol_name = symbol_name.split(" ", maxsplit=1)[0] doc_item = self.doc_symbols.get(symbol_name) - if doc_item is None: - log.debug("Symbol does not exist.") - return None + return doc_item - self.bot.stats.incr(f"doc_fetches.{doc_item.package}") + async def get_symbol_markdown(self, doc_item: DocItem) -> str: + """ + Get the Markdown from the symbol `doc_item` refers to. + First a redis lookup is attempted, if that fails the `item_fetcher` + is used to fetch the page and parse the HTML from it into Markdown. 
+ """ with self.symbol_get_event: markdown = await doc_cache.get(doc_item) if markdown is None: - log.debug(f"Redis cache miss for symbol `{symbol_name}`.") + log.debug(f"Redis cache miss with {doc_item}.") markdown = await self.item_fetcher.get_markdown(doc_item) if markdown is None: - markdown = "Unable to parse the requested symbol." + return "Unable to parse the requested symbol." + return markdown + + async def create_symbol_embed(self, symbol_name: str) -> Optional[discord.Embed]: + """ + Attempt to scrape and fetch the data for the given `symbol_name`, and build an embed from its contents. + + If the symbol is known, an Embed with documentation about it is returned. + + First check the DocRedisCache before querying the cog's `BatchParser`. + """ + log.trace(f"Building embed for symbol `{symbol_name}`") + if not self.refresh_event.is_set(): + log.debug("Waiting for inventories to be refreshed before processing item.") + await self.refresh_event.wait() + + doc_item = self.get_symbol_item(symbol_name) + if doc_item is None: + log.debug("Symbol does not exist.") + return None + + self.bot.stats.incr(f"doc_fetches.{doc_item.package}") embed = discord.Embed( title=discord.utils.escape_markdown(symbol_name), url=f"{doc_item.url}#{doc_item.symbol_id}", - description=markdown + description=await self.get_symbol_markdown(doc_item) ) # Show all symbols with the same name that were renamed in the footer, # with a max of 100 chars. 
@@ -314,7 +332,7 @@ class DocCog(commands.Cog): else: symbol = symbol_name.strip("`") async with ctx.typing(): - doc_embed = await self.get_symbol_embed(symbol) + doc_embed = await self.create_symbol_embed(symbol) if doc_embed is None: error_message = await send_denial(ctx, "No documentation found for the requested symbol.") -- cgit v1.2.3 From 7d596f5d8fb454f00288b0a6fbd60789c5dd17be Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 6 Mar 2021 03:47:21 +0100 Subject: Create the footer text before an inventory refresh can occur --- bot/exts/info/doc/_cog.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 64e204fad..c01e0f36a 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -282,11 +282,6 @@ class DocCog(commands.Cog): self.bot.stats.incr(f"doc_fetches.{doc_item.package}") - embed = discord.Embed( - title=discord.utils.escape_markdown(symbol_name), - url=f"{doc_item.url}#{doc_item.symbol_id}", - description=await self.get_symbol_markdown(doc_item) - ) # Show all symbols with the same name that were renamed in the footer, # with a max of 100 chars. 
if symbol_name in self.renamed_symbols: @@ -294,6 +289,12 @@ class DocCog(commands.Cog): footer_text = textwrap.shorten("Moved: " + renamed_symbols, 100, placeholder=' ...') else: footer_text = "" + + embed = discord.Embed( + title=discord.utils.escape_markdown(symbol_name), + url=f"{doc_item.url}#{doc_item.symbol_id}", + description=await self.get_symbol_markdown(doc_item) + ) embed.set_footer(text=footer_text) return embed -- cgit v1.2.3 From 51a11cc4b1ff9a4de0dfa33490ae7fceec96423d Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 6 Mar 2021 03:56:18 +0100 Subject: Handle unexpected errors when requesting markdown --- bot/exts/info/doc/_cog.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index c01e0f36a..0334f6001 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -9,6 +9,7 @@ from contextlib import suppress from types import SimpleNamespace from typing import Dict, NamedTuple, Optional, Union +import aiohttp import discord from discord.ext import commands @@ -257,7 +258,17 @@ class DocCog(commands.Cog): if markdown is None: log.debug(f"Redis cache miss with {doc_item}.") - markdown = await self.item_fetcher.get_markdown(doc_item) + try: + markdown = await self.item_fetcher.get_markdown(doc_item) + + except aiohttp.ClientError as e: + log.warning(f"A network error has occurred when requesting parsing of {doc_item}.", exc_info=e) + return "Unable to parse the requested symbol due to a network error." + + except Exception: + log.exception(f"An unexpected error has occurred when requesting parsing of {doc_item}.") + return "Unable to parse the requested symbol due to an error." + if markdown is None: return "Unable to parse the requested symbol." 
return markdown -- cgit v1.2.3 From e9bdccf7d51107691fab2f25573241a2e524d32a Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sat, 6 Mar 2021 11:59:22 +0000 Subject: Add JSON logging dependencies --- Pipfile | 1 + Pipfile.lock | 260 +++++++++++++++++++++++++++++++++++++---------------------- 2 files changed, 163 insertions(+), 98 deletions(-) diff --git a/Pipfile b/Pipfile index 0a94fb888..f92d9ea68 100644 --- a/Pipfile +++ b/Pipfile @@ -28,6 +28,7 @@ sphinx = "~=2.2" statsd = "~=3.3" arrow = "~=0.17" emoji = "~=0.6" +python-json-logger = "*" [dev-packages] coverage = "~=5.0" diff --git a/Pipfile.lock b/Pipfile.lock index f8cedb08f..f19f91ce8 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "228ae55fe5700ac3827ba6b661933b60b1d06f44fea8bcbe8c5a769fa10ab2fd" + "sha256": "a55fa167f9581360b1258a6564ab9ae68f9c56dd9e7997f9c5a2f102be28c69c" }, "pipfile-spec": 6, "requires": { @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:9773440a89840941ac3099a7720bf9d51e8764a484066b82ede4d395660ff430", - "sha256:a8065be3c722eb8f9fff8c0e7590729e7782202cdb9363d9830d7d5d47b45c7c" + "sha256:1d4305a5f78af3857310b4fe48348cdcf6c097e0e275ea88c2cd08570531a369", + "sha256:e69afef8695f47c5d107bbdba21bdb845d5c249acb3be53ef5c2d497b02657c0" ], "index": "pypi", - "version": "==6.7.1" + "version": "==6.8.0" }, "aiodns": { "hashes": [ @@ -96,6 +96,7 @@ "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573", "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e" ], + "markers": "python_version >= '3.6'", "version": "==3.3.1" }, "alabaster": { @@ -122,6 +123,7 @@ "sha256:c25e4fff73f64d20645254783c3224a4c49e083e3fab67c44f17af944c5e26af" ], "index": "pypi", + "markers": "python_version ~= '3.7'", "version": "==0.1.4" }, "async-timeout": { @@ -129,6 +131,7 @@ "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3" 
], + "markers": "python_full_version >= '3.5.3'", "version": "==3.0.1" }, "attrs": { @@ -136,6 +139,7 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "babel": { @@ -143,6 +147,7 @@ "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.9.0" }, "beautifulsoup4": { @@ -215,7 +220,6 @@ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], - "index": "pypi", "markers": "sys_platform == 'win32'", "version": "==0.4.4" }, @@ -248,6 +252,7 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "emoji": { @@ -330,6 +335,7 @@ "sha256:e64be68255234bb489a574c4f2f8df7029c98c81ec4d160d6cd836e7f0679390", "sha256:e82d6b930e02e80e5109b678c663a9ed210680ded81c1abaf54635d88d1da298" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "humanfriendly": { @@ -337,6 +343,7 @@ "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d", "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==9.1" }, "idna": { @@ -344,6 +351,7 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", 
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -351,6 +359,7 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "jinja2": { @@ -358,6 +367,7 @@ "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.3" }, "lxml": { @@ -466,15 +476,16 @@ "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "more-itertools": { "hashes": [ - "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", - "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" + "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced", + "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713" ], "index": "pypi", - "version": "==8.6.0" + "version": "==8.7.0" }, "multidict": { "hashes": [ @@ -516,12 +527,14 @@ "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281", "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80" ], + "markers": "python_version >= '3.6'", "version": "==5.1.0" }, "ordered-set": { "hashes": [ "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95" ], + "markers": "python_version >= '3.5'", "version": "==4.0.2" }, "packaging": { @@ -529,6 +542,7 @@ 
"sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.9" }, "pamqp": { @@ -577,6 +591,7 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { @@ -584,6 +599,7 @@ "sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0", "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88" ], + "markers": "python_version >= '3.5'", "version": "==2.8.0" }, "pyparsing": { @@ -591,6 +607,7 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "python-dateutil": { @@ -601,6 +618,13 @@ "index": "pypi", "version": "==2.8.1" }, + "python-json-logger": { + "hashes": [ + "sha256:f26eea7898db40609563bed0a7ca11af12e2a79858632706d835a0f961b7d398" + ], + "index": "pypi", + "version": "==2.0.1" + }, "pytz": { "hashes": [ "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", @@ -610,28 +634,37 @@ }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - 
"sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", 
+ "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.4.1" }, "redis": { "hashes": [ "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2", "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, "requests": { @@ -644,17 +677,18 @@ }, "sentry-sdk": { "hashes": [ - "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0", - "sha256:737a094e49a529dd0fdcaafa9e97cf7c3d5eb964bd229821d640bc77f3502b3f" + "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237", + "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b" ], "index": "pypi", - "version": "==0.19.5" + "version": "==0.20.3" }, "six": { "hashes": [ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -692,6 +726,7 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -699,6 +734,7 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -706,6 +742,7 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], + "markers": "python_version >= 
'3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -713,6 +750,7 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], + "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -720,6 +758,7 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -727,6 +766,7 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], + "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "statsd": { @@ -750,6 +790,7 @@ "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.26.3" }, "yarl": { @@ -792,6 +833,7 @@ "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a", "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71" ], + "markers": "python_version >= '3.6'", "version": "==1.6.3" } }, @@ -808,6 +850,7 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "certifi": { @@ -822,6 +865,7 @@ "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d", "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1" ], + "markers": "python_full_version >= '3.6.1'", "version": "==3.2.0" }, "chardet": { @@ -833,58 +877,61 @@ }, "coverage": { 
"hashes": [ - "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297", - "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1", - "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497", - "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606", - "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528", - "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b", - "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4", - "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830", - "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1", - "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f", - "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d", - "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3", - "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8", - "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500", - "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7", - "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb", - "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b", - "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059", - "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b", - "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72", - "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36", - "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277", - "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c", - "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631", - "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff", - 
"sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8", - "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec", - "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b", - "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7", - "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105", - "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b", - "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c", - "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b", - "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98", - "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4", - "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879", - "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f", - "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4", - "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044", - "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e", - "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899", - "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f", - "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448", - "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714", - "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2", - "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d", - "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd", - "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7", - "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae" + "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c", + "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6", 
+ "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45", + "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a", + "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03", + "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529", + "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a", + "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a", + "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2", + "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6", + "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759", + "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53", + "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a", + "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4", + "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff", + "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502", + "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793", + "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb", + "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905", + "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821", + "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b", + "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81", + "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0", + "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b", + "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3", + "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184", + "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701", + 
"sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a", + "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82", + "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638", + "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5", + "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083", + "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6", + "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90", + "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465", + "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a", + "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3", + "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e", + "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066", + "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf", + "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b", + "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae", + "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669", + "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873", + "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b", + "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6", + "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb", + "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160", + "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c", + "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079", + "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d", + "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.5" }, 
"coveralls": { "hashes": [ @@ -924,11 +971,11 @@ }, "flake8-annotations": { "hashes": [ - "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055", - "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e" + "sha256:8968ff12f296433028ad561c680ccc03a7cd62576d100c3f1475e058b3c11b43", + "sha256:bd0505616c0d85ebb45c6052d339c69f320d3f87fa079ab4e91a4f234a863d05" ], "index": "pypi", - "version": "==2.5.0" + "version": "==2.6.0" }, "flake8-bugbear": { "hashes": [ @@ -986,16 +1033,18 @@ }, "identify": { "hashes": [ - "sha256:de7129142a5c86d75a52b96f394d94d96d497881d2aaf8eafe320cdbe8ac4bcc", - "sha256:e0dae57c0397629ce13c289f6ddde0204edf518f557bfdb1e56474aa143e77c3" + "sha256:2179e7359471ab55729f201b3fdf7dc2778e221f868410fedcb0987b791ba552", + "sha256:2a5fdf2f5319cc357eda2550bea713a404392495961022cf2462624ce62f0f46" ], - "version": "==1.5.14" + "markers": "python_full_version >= '3.6.1'", + "version": "==2.1.0" }, "idna": { "hashes": [ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "mccabe": { @@ -1022,17 +1071,18 @@ }, "pre-commit": { "hashes": [ - "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0", - "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4" + "sha256:16212d1fde2bed88159287da88ff03796863854b04dc9f838a55979325a3d20e", + "sha256:399baf78f13f4de82a29b649afd74bef2c4e28eb4f021661fc7f29246e8c7a3a" ], "index": "pypi", - "version": "==2.9.3" + "version": "==2.10.1" }, "pycodestyle": { "hashes": [ "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.6.0" }, "pydocstyle": { @@ -1040,6 
+1090,7 @@ "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325", "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678" ], + "markers": "python_version >= '3.5'", "version": "==5.1.1" }, "pyflakes": { @@ -1047,26 +1098,35 @@ "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.2.0" }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + 
"sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.4.1" }, "requests": { "hashes": [ @@ -1081,6 +1141,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -1095,6 +1156,7 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "urllib3": { @@ -1102,6 +1164,7 @@ 
"sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.26.3" }, "virtualenv": { @@ -1109,6 +1172,7 @@ "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d", "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4.2" } } -- cgit v1.2.3 From fd7a693a4ed4268ab4823142c21e9c85973a3d4f Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sat, 6 Mar 2021 12:08:07 +0000 Subject: Use JSON logging when debug mode is disabled --- bot/log.py | 49 +++++++++++++++++++++++++++++++++---------------- 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/bot/log.py b/bot/log.py index e92233a33..bc3bba0af 100644 --- a/bot/log.py +++ b/bot/log.py @@ -1,11 +1,12 @@ import logging import os import sys -from logging import Logger, handlers +from logging import Logger, StreamHandler, handlers from pathlib import Path import coloredlogs import sentry_sdk +from pythonjsonlogger import jsonlogger from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.redis import RedisIntegration @@ -13,6 +14,15 @@ from bot import constants TRACE_LEVEL = 5 +PROD_FIELDS = [ + "asctime", + "name", + "levelname", + "message", + "funcName", + "filename" +] + def setup() -> None: """Set up loggers.""" @@ -33,21 +43,28 @@ def setup() -> None: root_log.setLevel(log_level) root_log.addHandler(file_handler) - if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: - coloredlogs.DEFAULT_LEVEL_STYLES = { - **coloredlogs.DEFAULT_LEVEL_STYLES, - "trace": {"color": 246}, - "critical": {"background": "red"}, - "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] - } - - if "COLOREDLOGS_LOG_FORMAT" not in os.environ: - 
coloredlogs.DEFAULT_LOG_FORMAT = format_string - - if "COLOREDLOGS_LOG_LEVEL" not in os.environ: - coloredlogs.DEFAULT_LOG_LEVEL = log_level - - coloredlogs.install(logger=root_log, stream=sys.stdout) + if constants.DEBUG_MODE: + if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: + coloredlogs.DEFAULT_LEVEL_STYLES = { + **coloredlogs.DEFAULT_LEVEL_STYLES, + "trace": {"color": 246}, + "critical": {"background": "red"}, + "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] + } + + if "COLOREDLOGS_LOG_FORMAT" not in os.environ: + coloredlogs.DEFAULT_LOG_FORMAT = format_string + + if "COLOREDLOGS_LOG_LEVEL" not in os.environ: + coloredlogs.DEFAULT_LOG_LEVEL = log_level + + coloredlogs.install(logger=root_log, stream=sys.stdout) + else: + json_format = " ".join([f"%({field})s" for field in PROD_FIELDS]) + stream_handler = StreamHandler() + formatter = jsonlogger.JsonFormatter(json_format) + stream_handler.setFormatter(formatter) + root_log.addHandler(stream_handler) logging.getLogger("discord").setLevel(logging.WARNING) logging.getLogger("websockets").setLevel(logging.WARNING) -- cgit v1.2.3 From b9141ea4def9868fb0f17476bcd4e4a6742c0afd Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 6 Mar 2021 14:15:11 +0200 Subject: Add parentheses back to previous nominations count Co-authored-by: Boris Muratov <8bee278@gmail.com> --- bot/exts/moderation/watchchannels/talentpool.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index 1649d4d48..11c629f1e 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -121,7 +121,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): ) if history: - msg += f"\n\n{len(history)} previous nominations in total" + msg += f"\n\n({len(history)} previous nominations in total)" await ctx.send(msg) -- cgit v1.2.3 From 
4532b405a590c5a45cffb90d48ff238f1e1cf7d4 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 6 Mar 2021 14:32:31 +0200 Subject: Fix trace logging of nomination 404 --- bot/exts/moderation/watchchannels/talentpool.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index 11c629f1e..e5414b0c9 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -181,7 +181,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") except ResponseCodeError as e: if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't nomination with id {nomination_id}") + self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") return else: @@ -212,7 +212,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") except ResponseCodeError as e: if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't nomination with id {nomination_id}") + self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") return else: -- cgit v1.2.3 From ce5fb702639fa013c608f5e53059722aee68f6b8 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 6 Mar 2021 14:35:19 +0200 Subject: Fix grammar of nomination cog Co-authored-by: Boris Muratov <8bee278@gmail.com> --- bot/exts/moderation/watchchannels/talentpool.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py 
b/bot/exts/moderation/watchchannels/talentpool.py index e5414b0c9..938720cc0 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -102,7 +102,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): if response_data.get('user', False): await ctx.send(":x: The specified user can't be found in the database tables") elif response_data.get('actor', False): - await ctx.send(":x: You already have nominated this user") + await ctx.send(":x: You have already nominated this user") return else: @@ -176,7 +176,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @nomination_edit_group.command(name='reason') @has_any_role(*MODERATION_ROLES) async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: - """Edits the reason of `actor` entry for the nomination with the given `id`.""" + """Edits the reason of a specific nominator in a specific active nomination.""" try: nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") except ResponseCodeError as e: @@ -188,21 +188,21 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): raise if not nomination["active"]: - await ctx.send(":x: Can't edit reason of ended nomination.") + await ctx.send(":x: Can't edit the reason of an inactive nomination.") return if not any(entry["actor"] == actor.id for entry in nomination["entries"]): - await ctx.send(f":x: {actor} don't have entry for this nomination.") + await ctx.send(f":x: {actor} doesn't have an entry in this nomination.") return - self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {reason}") + self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}") await self.bot.api_client.patch( f"{self.api_endpoint}/{nomination_id}", json={"actor": actor.id, "reason": reason} ) await self.fetch_user_cache() # Update cache - await 
ctx.send(":white_check_mark: Successfully updates reason of nomination.") + await ctx.send(":white_check_mark: Successfully updated nomination reason.") @nomination_edit_group.command(name='end_reason') @has_any_role(*MODERATION_ROLES) @@ -219,10 +219,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): raise if nomination["active"]: - await ctx.send(":x: Cannot edit end reason of active nomination.") + await ctx.send(":x: Can't edit the end reason of an active nomination.") return - self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {reason}") + self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}") await self.bot.api_client.patch( f"{self.api_endpoint}/{nomination_id}", -- cgit v1.2.3 From 4f7a9fb9af2f4eac803a7b1b597ce5e0091f4210 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 6 Mar 2021 14:36:15 +0200 Subject: Use actor mention instead of username in nomination string --- bot/exts/moderation/watchchannels/talentpool.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index 938720cc0..55c41a754 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -271,7 +271,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): reason = site_entry["reason"] or "*None*" created = time.format_infraction(site_entry["inserted_at"]) - entries.append(f"Actor: {actor or actor_id}\nReason: {reason}\nCreated: {created}") + entries.append( + f"Actor: {actor.mention if actor else actor_id}\nReason: {reason}\nCreated: {created}" + ) entries_string = "\n\n".join(entries) -- cgit v1.2.3 From 477956d44b331e57949d860eb3bfe985007a3b18 Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sat, 6 Mar 2021 13:13:39 +0000 Subject: Version lock JSON logger --- Pipfile | 2 +- Pipfile.lock | 2 +- 2 files 
changed, 2 insertions(+), 2 deletions(-) diff --git a/Pipfile b/Pipfile index f92d9ea68..024aa6eff 100644 --- a/Pipfile +++ b/Pipfile @@ -28,7 +28,7 @@ sphinx = "~=2.2" statsd = "~=3.3" arrow = "~=0.17" emoji = "~=0.6" -python-json-logger = "*" +python-json-logger = "~=2.0" [dev-packages] coverage = "~=5.0" diff --git a/Pipfile.lock b/Pipfile.lock index f19f91ce8..dc7f6f21f 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a55fa167f9581360b1258a6564ab9ae68f9c56dd9e7997f9c5a2f102be28c69c" + "sha256": "81ca9d1891e71de1c3f71958f082e1a8cad71e5b3ca425dc561d0ae74664fdb0" }, "pipfile-spec": 6, "requires": { -- cgit v1.2.3 From a98ecbfd2e3446ca9a17566220c41235d1328fcb Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Sat, 6 Mar 2021 17:44:34 +0200 Subject: Filtering hotfix Bug caused by an outdated function signature in a previous commit in the #1402 PR --- bot/exts/filters/filtering.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 4093ba4ad..946bbf2c3 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -240,7 +240,13 @@ class Filtering(Cog): # We also do not need to worry about filters that take the full message, # since all we have is an arbitrary string. 
if _filter["enabled"] and _filter["content_only"]: - match, reason = await _filter["function"](result) + filter_result = await _filter["function"](result) + reason = None + + if isinstance(filter_result, tuple): + match, reason = filter_result + else: + match = filter_result if match: # If this is a filter (not a watchlist), we set the variable so we know -- cgit v1.2.3 From 8f1ef21bc2e140d39dabb16de8fbbd6077805a25 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Sat, 6 Mar 2021 17:47:16 +0200 Subject: Remove trailing whitespace --- bot/exts/filters/filtering.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 946bbf2c3..c90b18dcb 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -242,7 +242,7 @@ class Filtering(Cog): if _filter["enabled"] and _filter["content_only"]: filter_result = await _filter["function"](result) reason = None - + if isinstance(filter_result, tuple): match, reason = filter_result else: -- cgit v1.2.3 From c53bff5771ded98b4ffc5c50fdd1634056889b07 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sat, 6 Mar 2021 17:22:00 +0100 Subject: Remove superfluous comment After the move to a separate method, the docstring now documents the behaviour so a comment is unnecessary --- bot/exts/info/doc/_cog.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 0334f6001..fb45d0bbb 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -239,8 +239,6 @@ class DocCog(commands.Cog): """ doc_item = self.doc_symbols.get(symbol_name) if doc_item is None and " " in symbol_name: - # If an invalid symbol contains a space, check if the command was invoked - # in the format !d symbol_name = symbol_name.split(" ", maxsplit=1)[0] doc_item = self.doc_symbols.get(symbol_name) -- cgit v1.2.3 From 
96a369cf0922f3839c20c0c4c62f9fafb8f8ba9f Mon Sep 17 00:00:00 2001 From: Steele Farnsworth <32915757+swfarnsworth@users.noreply.github.com> Date: Sat, 6 Mar 2021 16:27:21 -0500 Subject: Made multiline concatenated string conform to a certain style. That style is not currently enforced by the linter. Co-authored-by: Matteo Bertucci --- bot/exts/moderation/infraction/_utils.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/infraction/_utils.py b/bot/exts/moderation/infraction/_utils.py index e58c2b22f..a98b4828b 100644 --- a/bot/exts/moderation/infraction/_utils.py +++ b/bot/exts/moderation/infraction/_utils.py @@ -32,8 +32,10 @@ APPEAL_EMAIL = "appeals@pythondiscord.com" INFRACTION_TITLE = "Please review our rules" INFRACTION_APPEAL_EMAIL_FOOTER = f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}" -INFRACTION_APPEAL_MODMAIL_FOOTER = ('If you would like to discuss or appeal this infraction, ' - 'send a message to the ModMail bot') +INFRACTION_APPEAL_MODMAIL_FOOTER = ( + 'If you would like to discuss or appeal this infraction, ' + 'send a message to the ModMail bot' +) INFRACTION_AUTHOR_NAME = "Infraction information" INFRACTION_DESCRIPTION_TEMPLATE = ( -- cgit v1.2.3 From dc7eef432189aaaf0ea8b0d16588852306104957 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Sun, 7 Mar 2021 05:18:02 +0100 Subject: Handle arbitrary amount of backslashes preceding the quote char Tests for this were added additionally --- bot/exts/info/doc/_parsing.py | 19 ++++++++----------- tests/bot/exts/info/doc/test_parsing.py | 7 +++++++ 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index fc38ff82a..57c991ae0 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -46,15 +46,6 @@ _BRACKET_PAIRS = { } -def _is_closing_quote(search_string: str, index: int) -> bool: - """Check whether the quote at 
`index` inside `search_string` can be a closing quote.""" - if search_string[index - 1] != "\\": - return True # The quote is not escaped. - elif search_string[index - 2] == "\\": - return True - return False - - def _split_parameters(parameters_string: str) -> Iterator[str]: """ Split parameters of a signature into individual parameter strings on commas. @@ -70,9 +61,15 @@ def _split_parameters(parameters_string: str) -> Iterator[str]: if character in {"'", '"'}: # Skip everything inside of strings, regardless of the depth. quote_character = character # The closing quote must equal the opening quote. - for index, character in enumerated_string: - if character == quote_character and _is_closing_quote(parameters_string, index): + preceding_backslashes = 0 + for _, character in enumerated_string: + # If an odd number of backslashes precedes the quote, it was escaped. + if character == quote_character and not preceding_backslashes % 2: break + if character == "\\": + preceding_backslashes += 1 + else: + preceding_backslashes = 0 elif current_search is None: if (current_search := _BRACKET_PAIRS.get(character)) is not None: diff --git a/tests/bot/exts/info/doc/test_parsing.py b/tests/bot/exts/info/doc/test_parsing.py index f302b38fc..1663d8491 100644 --- a/tests/bot/exts/info/doc/test_parsing.py +++ b/tests/bot/exts/info/doc/test_parsing.py @@ -42,6 +42,13 @@ class SignatureSplitter(TestCase): ) self._run_tests(test_cases) + def test_quote_escaped(self): + test_cases = ( + (r"'\',','\\',0", [r"'\','", r"'\\'", "0"]), + (r"'0\',0\\\'\\',0", [r"'0\',0\\\'\\'", "0"]), + ) + self._run_tests(test_cases) + def test_real_signatures(self): test_cases = ( ("start, stop[, step]", ["start", " stop[, step]"]), -- cgit v1.2.3 From fa016c096ef249ea1b8d722633882a09535e9c44 Mon Sep 17 00:00:00 2001 From: xithrius Date: Thu, 11 Feb 2021 01:51:40 -0800 Subject: Added filter. 
--- bot/exts/info/pypi.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py index 3e326e8bb..8fe249c8a 100644 --- a/bot/exts/info/pypi.py +++ b/bot/exts/info/pypi.py @@ -1,6 +1,7 @@ import itertools import logging import random +import re from discord import Embed from discord.ext.commands import Cog, Context, command @@ -12,8 +13,11 @@ from bot.constants import Colours, NEGATIVE_REPLIES URL = "https://pypi.org/pypi/{package}/json" FIELDS = ("author", "requires_python", "summary", "license") PYPI_ICON = "https://cdn.discordapp.com/emojis/766274397257334814.png" + PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white)) +ILLEGAL_CHARACTERS = re.compile(r"[^a-zA-Z0-9-.]+") + log = logging.getLogger(__name__) @@ -32,6 +36,11 @@ class PyPi(Cog): ) embed.set_thumbnail(url=PYPI_ICON) + if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None: + embed.description = f"Illegal character passed into command: '{escape_markdown(character.group(0))}'" + await ctx.send(embed=embed) + return + async with self.bot.http_session.get(URL.format(package=package)) as response: if response.status == 404: embed.description = "Package could not be found." 
-- cgit v1.2.3 From 46762fc2ca2a3d05045e758c51f9a7633c17744d Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 7 Mar 2021 13:07:01 +0100 Subject: Pipenv: add 'python-frontmatter' & re-lock --- Pipfile | 1 + Pipfile.lock | 106 ++++++++++++++++++++++++++++++++--------------------------- 2 files changed, 59 insertions(+), 48 deletions(-) diff --git a/Pipfile b/Pipfile index 024aa6eff..e222a2108 100644 --- a/Pipfile +++ b/Pipfile @@ -21,6 +21,7 @@ lxml = "~=4.4" markdownify = "==0.5.3" more_itertools = "~=8.2" python-dateutil = "~=2.8" +python-frontmatter = "~=0.5.0" pyyaml = "~=5.1" requests = "~=2.22" sentry-sdk = "~=0.19" diff --git a/Pipfile.lock b/Pipfile.lock index dc7f6f21f..01a78af9b 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "81ca9d1891e71de1c3f71958f082e1a8cad71e5b3ca425dc561d0ae74664fdb0" + "sha256": "e8b1d8e8a3b258f482c25fe396aaa3255c749fdeae26770fccd7ce1a35f41180" }, "pipfile-spec": 6, "requires": { @@ -34,46 +34,46 @@ }, "aiohttp": { "hashes": [ - "sha256:119feb2bd551e58d83d1b38bfa4cb921af8ddedec9fad7183132db334c3133e0", - "sha256:16d0683ef8a6d803207f02b899c928223eb219111bd52420ef3d7a8aa76227b6", - "sha256:2eb3efe243e0f4ecbb654b08444ae6ffab37ac0ef8f69d3a2ffb958905379daf", - "sha256:2ffea7904e70350da429568113ae422c88d2234ae776519549513c8f217f58a9", - "sha256:40bd1b101b71a18a528ffce812cc14ff77d4a2a1272dfb8b11b200967489ef3e", - "sha256:418597633b5cd9639e514b1d748f358832c08cd5d9ef0870026535bd5eaefdd0", - "sha256:481d4b96969fbfdcc3ff35eea5305d8565a8300410d3d269ccac69e7256b1329", - "sha256:4c1bdbfdd231a20eee3e56bd0ac1cd88c4ff41b64ab679ed65b75c9c74b6c5c2", - "sha256:5563ad7fde451b1986d42b9bb9140e2599ecf4f8e42241f6da0d3d624b776f40", - "sha256:58c62152c4c8731a3152e7e650b29ace18304d086cb5552d317a54ff2749d32a", - "sha256:5b50e0b9460100fe05d7472264d1975f21ac007b35dcd6fd50279b72925a27f4", - "sha256:5d84ecc73141d0a0d61ece0742bb7ff5751b0657dab8405f899d3ceb104cc7de", - 
"sha256:5dde6d24bacac480be03f4f864e9a67faac5032e28841b00533cd168ab39cad9", - "sha256:5e91e927003d1ed9283dee9abcb989334fc8e72cf89ebe94dc3e07e3ff0b11e9", - "sha256:62bc216eafac3204877241569209d9ba6226185aa6d561c19159f2e1cbb6abfb", - "sha256:6c8200abc9dc5f27203986100579fc19ccad7a832c07d2bc151ce4ff17190076", - "sha256:6ca56bdfaf825f4439e9e3673775e1032d8b6ea63b8953d3812c71bd6a8b81de", - "sha256:71680321a8a7176a58dfbc230789790639db78dad61a6e120b39f314f43f1907", - "sha256:7c7820099e8b3171e54e7eedc33e9450afe7cd08172632d32128bd527f8cb77d", - "sha256:7dbd087ff2f4046b9b37ba28ed73f15fd0bc9f4fdc8ef6781913da7f808d9536", - "sha256:822bd4fd21abaa7b28d65fc9871ecabaddc42767884a626317ef5b75c20e8a2d", - "sha256:8ec1a38074f68d66ccb467ed9a673a726bb397142c273f90d4ba954666e87d54", - "sha256:950b7ef08b2afdab2488ee2edaff92a03ca500a48f1e1aaa5900e73d6cf992bc", - "sha256:99c5a5bf7135607959441b7d720d96c8e5c46a1f96e9d6d4c9498be8d5f24212", - "sha256:b84ad94868e1e6a5e30d30ec419956042815dfaea1b1df1cef623e4564c374d9", - "sha256:bc3d14bf71a3fb94e5acf5bbf67331ab335467129af6416a437bd6024e4f743d", - "sha256:c2a80fd9a8d7e41b4e38ea9fe149deed0d6aaede255c497e66b8213274d6d61b", - "sha256:c44d3c82a933c6cbc21039326767e778eface44fca55c65719921c4b9661a3f7", - "sha256:cc31e906be1cc121ee201adbdf844522ea3349600dd0a40366611ca18cd40e81", - "sha256:d5d102e945ecca93bcd9801a7bb2fa703e37ad188a2f81b1e65e4abe4b51b00c", - "sha256:dd7936f2a6daa861143e376b3a1fb56e9b802f4980923594edd9ca5670974895", - "sha256:dee68ec462ff10c1d836c0ea2642116aba6151c6880b688e56b4c0246770f297", - "sha256:e76e78863a4eaec3aee5722d85d04dcbd9844bc6cd3bfa6aa880ff46ad16bfcb", - "sha256:eab51036cac2da8a50d7ff0ea30be47750547c9aa1aa2cf1a1b710a1827e7dbe", - "sha256:f4496d8d04da2e98cc9133e238ccebf6a13ef39a93da2e87146c8c8ac9768242", - "sha256:fbd3b5e18d34683decc00d9a360179ac1e7a320a5fee10ab8053ffd6deab76e0", - "sha256:feb24ff1226beeb056e247cf2e24bba5232519efb5645121c4aea5b6ad74c1f2" + "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe", 
+ "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe", + "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5", + "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8", + "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd", + "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb", + "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c", + "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87", + "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0", + "sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290", + "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5", + "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287", + "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde", + "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf", + "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8", + "sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16", + "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf", + "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809", + "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213", + "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f", + "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013", + "sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b", + "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9", + "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5", + "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb", + "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df", + 
"sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4", + "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439", + "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f", + "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22", + "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f", + "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5", + "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970", + "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009", + "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc", + "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a", + "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95" ], "index": "pypi", - "version": "==3.7.4" + "version": "==3.7.4.post0" }, "aioping": { "hashes": [ @@ -210,10 +210,11 @@ }, "chardet": { "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "version": "==3.0.4" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.0.0" }, "colorama": { "hashes": [ @@ -596,11 +597,11 @@ }, "pygments": { "hashes": [ - "sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0", - "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88" + "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", + "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" ], "markers": "python_version >= '3.5'", - "version": "==2.8.0" + "version": "==2.8.1" }, "pyparsing": { "hashes": [ @@ -618,6 +619,14 @@ "index": "pypi", "version": 
"==2.8.1" }, + "python-frontmatter": { + "hashes": [ + "sha256:a7dcdfdaf498d488dce98bfa9452f8b70f803a923760ceab1ebd99291d98d28a", + "sha256:a9c2e90fc38e9f0c68d8b82299040f331ca3b8525ac7fa5f6beffef52b26c426" + ], + "index": "pypi", + "version": "==0.5.0" + }, "python-json-logger": { "hashes": [ "sha256:f26eea7898db40609563bed0a7ca11af12e2a79858632706d835a0f961b7d398" @@ -870,10 +879,11 @@ }, "chardet": { "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "version": "==3.0.4" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.0.0" }, "coverage": { "hashes": [ -- cgit v1.2.3 From cd71b8447eaad67b6885d99e00c230198c21cf0e Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sun, 7 Mar 2021 16:27:17 +0100 Subject: Mark #appeals as a mod channel --- config-default.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config-default.yml b/config-default.yml index 18d9cd370..3dbc7bd6b 100644 --- a/config-default.yml +++ b/config-default.yml @@ -195,6 +195,7 @@ guild: incidents_archive: 720668923636351037 mods: &MODS 305126844661760000 mod_alerts: 473092532147060736 + mod_appeals: &MOD_APPEALS 808790025688711198 mod_meta: &MOD_META 775412552795947058 mod_spam: &MOD_SPAM 620607373828030464 mod_tools: &MOD_TOOLS 775413915391098921 @@ -230,6 +231,7 @@ guild: moderation_channels: - *ADMINS - *ADMIN_SPAM + - *MOD_APPEALS - *MOD_META - *MOD_TOOLS - *MODS -- cgit v1.2.3 From 75df6d9ac952c76260fd44f5191c02423bb847fa Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sun, 7 Mar 2021 18:56:18 +0200 Subject: Improve nomination string representation --- bot/exts/moderation/watchchannels/talentpool.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index 55c41a754..c2f6ab2c5 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -272,7 +272,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): reason = site_entry["reason"] or "*None*" created = time.format_infraction(site_entry["inserted_at"]) entries.append( - f"Actor: {actor.mention if actor else actor_id}\nReason: {reason}\nCreated: {created}" + f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" ) entries_string = "\n\n".join(entries) @@ -299,12 +299,12 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): =============== Status: Inactive Date: {start_date} + Nomination ID: `{nomination_object["id"]}` {entries_string} End date: {end_date} Unwatch reason: {nomination_object["end_reason"]} - Nomination ID: `{nomination_object["id"]}` =============== """ ) -- cgit v1.2.3 From c07b2117e6a473183050c58f89ee213f25cca77e Mon Sep 17 00:00:00 2001 From: xithrius Date: Sun, 7 Mar 2021 23:08:17 -0800 Subject: Added Redis caching. --- bot/exts/help_channels/_cog.py | 65 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 0995c8a79..b095429a3 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -5,6 +5,7 @@ import typing as t from datetime import datetime, timezone from operator import attrgetter +import async_rediscache import discord import discord.abc from discord.ext import commands @@ -20,6 +21,7 @@ NAMESPACE = "help" HELP_CHANNEL_TOPIC = """ This is a Python help channel. You can claim your own help channel in the Python Help: Available category. 
""" +AVAILABLE_HELP_CHANNELS = "**Currently available help channel(s):** {available}" class HelpChannels(commands.Cog): @@ -72,6 +74,14 @@ class HelpChannels(commands.Cog): self.last_notification: t.Optional[datetime] = None + # Caching the message object for the dynamic message. + self.dynamic_message_cache = async_rediscache.RedisCache(namespace="Dynamic Message") + + self.how_to_get_help: t.Optional[discord.TextChannel] = None + self.dynamic_message: t.Optional[discord.Message] = None + + self.available_help_channels: t.Set[discord.TextChannel] = set() + # Asyncio stuff self.queue_tasks: t.List[asyncio.Task] = [] self.init_task = self.bot.loop.create_task(self.init_cog()) @@ -102,6 +112,10 @@ class HelpChannels(commands.Cog): await _cooldown.revoke_send_permissions(message.author, self.scheduler) await _message.pin(message) + try: + await _message.dm_on_open(message) + except Exception as e: + log.warning("Error occurred while sending DM:", exc_info=e) # Add user with channel for dormant check. await _caches.claimants.set(message.channel.id, message.author.id) @@ -114,6 +128,9 @@ class HelpChannels(commands.Cog): await _caches.unanswered.set(message.channel.id, True) + # Removing the help channel from the dynamic message, and editing/sending that message. + self.available_help_channels.remove(message.channel) + # Not awaited because it may indefinitely hold the lock while waiting for a channel. scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}") @@ -275,6 +292,15 @@ class HelpChannels(commands.Cog): # This may confuse users. So would potentially long delays for the cog to become ready. self.close_command.enabled = True + # Acquiring and modifying the channel to dynamically update the available help channels message. 
+ log.trace("Attempting to fetch dynamic message ID along with How-to-get-help channel ID.") + self.how_to_get_help = await self.dynamic_message_cache.get("How-to-get-help Channel ID") + self.dynamic_message = await self.dynamic_message_cache.get("How-to-get-help Dynamic Message ID") + + # Getting channels that need to be included in the dynamic message. + await self.update_available_help_channels() + log.trace("Dynamic available help message updated.") + await self.init_available() _stats.report_counts() @@ -332,6 +358,10 @@ class HelpChannels(commands.Cog): category_id=constants.Categories.help_available, ) + # Adding the help channel to the dynamic message, and editing/sending that message. + self.available_help_channels.add(channel) + await self.update_available_help_channels() + _stats.report_counts() async def move_to_dormant(self, channel: discord.TextChannel) -> None: @@ -461,3 +491,38 @@ class HelpChannels(commands.Cog): self.queue_tasks.remove(task) return channel + + async def update_available_help_channels(self) -> None: + """Updates the dynamic message within #how-to-get-help for available help channels.""" + if not self.available_help_channels: + self.available_help_channels = set( + c for c in self.available_category.channels if not _channel.is_excluded_channel(c) + ) + + available_channels = AVAILABLE_HELP_CHANNELS.format( + available=', '.join(c.mention for c in self.available_help_channels) or None + ) + + if self.how_to_get_help is None: + self.how_to_get_help = await channel_utils.try_get_channel(constants.Channels.how_to_get_help) + await self.dynamic_message_cache.set("How-to-get-help Channel ID", self.how_to_get_help.id) + + if self.dynamic_message is None: + last_message = await self.how_to_get_help.history(limit=1).find(lambda m: m.author == self.bot.user) + + if not last_message: + self.dynamic_message = await self.how_to_get_help.send(available_channels) + log.trace("A dynamic message was sent for later modification because one couldn't 
be found.") + else: + await last_message.edit(content=available_channels) + + await self.dynamic_message_cache.set("How-to-get-help Dynamic Message ID", self.dynamic_message.id) + + else: + try: + await self.dynamic_message.edit(content=available_channels) + except discord.NotFound: + self.dynamic_message = await self.how_to_get_help.send(available_channels) + await self.dynamic_message_cache.set("How-to-get-help Dynamic Message ID", self.dynamic_message.id) + log.trace("Dynamic has been sent again since previous was removed during process of updating message.") + -- cgit v1.2.3 From 71ea5a6c9e6f3f4c09ee1b4b26fafb8e31e9ffbf Mon Sep 17 00:00:00 2001 From: xithrius Date: Sun, 7 Mar 2021 23:13:33 -0800 Subject: Removed extra newline at end of _cog.py. --- bot/exts/help_channels/_cog.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index b095429a3..680c77f10 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -525,4 +525,3 @@ class HelpChannels(commands.Cog): self.dynamic_message = await self.how_to_get_help.send(available_channels) await self.dynamic_message_cache.set("How-to-get-help Dynamic Message ID", self.dynamic_message.id) log.trace("Dynamic has been sent again since previous was removed during process of updating message.") - -- cgit v1.2.3 From 016614c24bf899122dc2f55b19c4de463bcf5524 Mon Sep 17 00:00:00 2001 From: xithrius Date: Mon, 8 Mar 2021 00:11:24 -0800 Subject: Using http methods to edit/send messages. --- bot/exts/help_channels/_cog.py | 42 ++++++++++++++++-------------------------- 1 file changed, 16 insertions(+), 26 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 680c77f10..11e3aef59 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -77,9 +77,7 @@ class HelpChannels(commands.Cog): # Caching the message object for the dynamic message. 
self.dynamic_message_cache = async_rediscache.RedisCache(namespace="Dynamic Message") - self.how_to_get_help: t.Optional[discord.TextChannel] = None - self.dynamic_message: t.Optional[discord.Message] = None - + self.dynamic_message: t.Optional[int] = None self.available_help_channels: t.Set[discord.TextChannel] = set() # Asyncio stuff @@ -292,9 +290,8 @@ class HelpChannels(commands.Cog): # This may confuse users. So would potentially long delays for the cog to become ready. self.close_command.enabled = True - # Acquiring and modifying the channel to dynamically update the available help channels message. - log.trace("Attempting to fetch dynamic message ID along with How-to-get-help channel ID.") - self.how_to_get_help = await self.dynamic_message_cache.get("How-to-get-help Channel ID") + # Acquiring the dynamic message ID, if it exists within the cache. + log.trace("Attempting to fetch How-to-get-help dynamic message ID.") self.dynamic_message = await self.dynamic_message_cache.get("How-to-get-help Dynamic Message ID") # Getting channels that need to be included in the dynamic message. 
@@ -503,25 +500,18 @@ class HelpChannels(commands.Cog): available=', '.join(c.mention for c in self.available_help_channels) or None ) - if self.how_to_get_help is None: - self.how_to_get_help = await channel_utils.try_get_channel(constants.Channels.how_to_get_help) - await self.dynamic_message_cache.set("How-to-get-help Channel ID", self.how_to_get_help.id) - - if self.dynamic_message is None: - last_message = await self.how_to_get_help.history(limit=1).find(lambda m: m.author == self.bot.user) - - if not last_message: - self.dynamic_message = await self.how_to_get_help.send(available_channels) - log.trace("A dynamic message was sent for later modification because one couldn't be found.") - else: - await last_message.edit(content=available_channels) - - await self.dynamic_message_cache.set("How-to-get-help Dynamic Message ID", self.dynamic_message.id) - - else: + if self.dynamic_message is not None: try: - await self.dynamic_message.edit(content=available_channels) + await self.bot.http.edit_message( + constants.Channels.how_to_get_help, self.dynamic_message, content=available_channels + ) + log.trace("Help channels have changed, dynamic message has been edited.") except discord.NotFound: - self.dynamic_message = await self.how_to_get_help.send(available_channels) - await self.dynamic_message_cache.set("How-to-get-help Dynamic Message ID", self.dynamic_message.id) - log.trace("Dynamic has been sent again since previous was removed during process of updating message.") + pass + else: + log.trace("No How-to-get-help dynamic message could be found in the Redis cache. 
Setting a new one.") + new_dynamic_message = await self.bot.http.send_message( + constants.Channels.how_to_get_help, available_channels + ) + self.dynamic_message = new_dynamic_message["id"] + await self.dynamic_message_cache.set("How-to-get-help Dynamic Message ID", self.dynamic_message) -- cgit v1.2.3 From 789689409cd437f77f27e89a1d8e4f8697f70872 Mon Sep 17 00:00:00 2001 From: xithrius Date: Mon, 8 Mar 2021 02:17:51 -0800 Subject: Moved cache, reworked logic so message sends on cog reload. --- bot/exts/help_channels/_caches.py | 4 ++++ bot/exts/help_channels/_cog.py | 21 ++++++++++----------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py index 4cea385b7..9986ddc09 100644 --- a/bot/exts/help_channels/_caches.py +++ b/bot/exts/help_channels/_caches.py @@ -17,3 +17,7 @@ question_messages = RedisCache(namespace="HelpChannels.question_messages") # activity and False being other activity. # RedisCache[discord.TextChannel.id, bool] unanswered = RedisCache(namespace="HelpChannels.unanswered") + +# This cache keeps track of the dynamic message ID for +# the continuously updated message in the #How-to-get-help channel. +dynamic_message = RedisCache(namespace="HelpChannels.dynamic_message") diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 11e3aef59..0c524d526 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -74,9 +74,6 @@ class HelpChannels(commands.Cog): self.last_notification: t.Optional[datetime] = None - # Caching the message object for the dynamic message. - self.dynamic_message_cache = async_rediscache.RedisCache(namespace="Dynamic Message") - self.dynamic_message: t.Optional[int] = None self.available_help_channels: t.Set[discord.TextChannel] = set() @@ -292,7 +289,7 @@ class HelpChannels(commands.Cog): # Acquiring the dynamic message ID, if it exists within the cache. 
log.trace("Attempting to fetch How-to-get-help dynamic message ID.") - self.dynamic_message = await self.dynamic_message_cache.get("How-to-get-help Dynamic Message ID") + self.dynamic_message = await _caches.dynamic_message.get("message_id") # Getting channels that need to be included in the dynamic message. await self.update_available_help_channels() @@ -508,10 +505,12 @@ class HelpChannels(commands.Cog): log.trace("Help channels have changed, dynamic message has been edited.") except discord.NotFound: pass - else: - log.trace("No How-to-get-help dynamic message could be found in the Redis cache. Setting a new one.") - new_dynamic_message = await self.bot.http.send_message( - constants.Channels.how_to_get_help, available_channels - ) - self.dynamic_message = new_dynamic_message["id"] - await self.dynamic_message_cache.set("How-to-get-help Dynamic Message ID", self.dynamic_message) + else: + return + + log.trace("No How-to-get-help dynamic message could be found in the Redis cache. Setting a new one.") + new_dynamic_message = await self.bot.http.send_message( + constants.Channels.how_to_get_help, available_channels + ) + self.dynamic_message = new_dynamic_message["id"] + await _caches.dynamic_message.set("message_id", self.dynamic_message) -- cgit v1.2.3 From 9d783670a2c9fd9249d5e1df00522032dddb6f77 Mon Sep 17 00:00:00 2001 From: xithrius Date: Mon, 8 Mar 2021 02:19:02 -0800 Subject: Removed async_rediscache import. 
--- bot/exts/help_channels/_cog.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 0c524d526..9a33a6bb1 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -5,7 +5,6 @@ import typing as t from datetime import datetime, timezone from operator import attrgetter -import async_rediscache import discord import discord.abc from discord.ext import commands -- cgit v1.2.3 From 1255bbebce25f82620af7c0c52ed70905c37ea53 Mon Sep 17 00:00:00 2001 From: xithrius Date: Mon, 8 Mar 2021 03:26:56 -0800 Subject: Purge ban now says 'purge ban' on user purge ban. --- bot/exts/moderation/infraction/_scheduler.py | 6 ++++-- bot/exts/moderation/infraction/infractions.py | 2 ++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py index a73f2e8da..b48c1c19e 100644 --- a/bot/exts/moderation/infraction/_scheduler.py +++ b/bot/exts/moderation/infraction/_scheduler.py @@ -173,6 +173,8 @@ class InfractionScheduler: total = len(infractions) end_msg = f" (#{id_} ; {total} infraction{ngettext('', 's', total)} total)" + purge = infraction['purge'] + # Execute the necessary actions to apply the infraction on Discord. if action_coro: log.trace(f"Awaiting the infraction #{id_} application action coroutine.") @@ -210,7 +212,7 @@ class InfractionScheduler: log.error(f"Deletion of {infr_type} infraction #{id_} failed with error code {e.status}.") infr_message = "" else: - infr_message = f" **{' '.join(infr_type.split('_'))}** to {user.mention}{expiry_msg}{end_msg}" + infr_message = f" **{purge}{' '.join(infr_type.split('_'))}** to {user.mention}{expiry_msg}{end_msg}" # Send a confirmation message to the invoking context. 
log.trace(f"Sending infraction #{id_} confirmation message.") @@ -234,7 +236,7 @@ class InfractionScheduler: footer=f"ID {infraction['id']}" ) - log.info(f"Applied {infr_type} infraction #{id_} to {user}.") + log.info(f"Applied {purge}{infr_type} infraction #{id_} to {user}.") return not failed async def pardon_infraction( diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index 3b5b1df45..d89e80acc 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -318,6 +318,8 @@ class Infractions(InfractionScheduler, commands.Cog): if infraction is None: return + infraction["purge"] = "purge " if purge_days else "" + self.mod_log.ignore(Event.member_remove, user.id) if reason: -- cgit v1.2.3 From f76e47bf9b1d9956a36d891e3aa64593c65568c8 Mon Sep 17 00:00:00 2001 From: xithrius Date: Mon, 8 Mar 2021 03:35:54 -0800 Subject: Fixed unittest for purge infraction. --- tests/bot/exts/moderation/infraction/test_infractions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py index 86c2617ea..08f39cd50 100644 --- a/tests/bot/exts/moderation/infraction/test_infractions.py +++ b/tests/bot/exts/moderation/infraction/test_infractions.py @@ -39,7 +39,7 @@ class TruncationTests(unittest.IsolatedAsyncioTestCase): delete_message_days=0 ) self.cog.apply_infraction.assert_awaited_once_with( - self.ctx, {"foo": "bar"}, self.target, self.ctx.guild.ban.return_value + self.ctx, {"foo": "bar", "purge": ""}, self.target, self.ctx.guild.ban.return_value ) @patch("bot.exts.moderation.infraction._utils.post_infraction") -- cgit v1.2.3 From 7a97eec931a8eb72ff1aac101e5bdd8e5b51de62 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Mon, 8 Mar 2021 17:08:07 +0100 Subject: Make the snowflake command accept many snowflakes --- bot/exts/utils/utils.py | 25 
+++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index eb92dfca7..1a5ded7a8 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -2,7 +2,7 @@ import difflib import logging import re import unicodedata -from typing import Tuple, Union +from typing import Tuple, Union, List from discord import Colour, Embed, utils from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role @@ -156,18 +156,19 @@ class Utils(Cog): @command(aliases=("snf", "snfl", "sf")) @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) - async def snowflake(self, ctx: Context, snowflake: Snowflake) -> None: + async def snowflake(self, ctx: Context, *snowflakes: Snowflake) -> None: """Get Discord snowflake creation time.""" - created_at = snowflake_time(snowflake) - embed = Embed( - description=f"**Created at {created_at}** ({time_since(created_at, max_units=3)}).", - colour=Colour.blue() - ) - embed.set_author( - name=f"Snowflake: {snowflake}", - icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" - ) - await ctx.send(embed=embed) + for snowflake in snowflakes: + created_at = snowflake_time(snowflake) + embed = Embed( + description=f"**Created at {created_at}** ({time_since(created_at, max_units=3)}).", + colour=Colour.blue() + ) + embed.set_author( + name=f"Snowflake: {snowflake}", + icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" + ) + await ctx.send(embed=embed) @command(aliases=("poll",)) @has_any_role(*MODERATION_ROLES) -- cgit v1.2.3 From 0a2e08c28d0dc6ca523bdf421a4759d9c38d8a3f Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Mon, 8 Mar 2021 17:30:58 +0100 Subject: Restrict non-staffer to one snowflake at the time --- bot/exts/utils/utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bot/exts/utils/utils.py 
b/bot/exts/utils/utils.py index 1a5ded7a8..a5d6f69b9 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -2,7 +2,7 @@ import difflib import logging import re import unicodedata -from typing import Tuple, Union, List +from typing import Tuple, Union from discord import Colour, Embed, utils from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role @@ -14,6 +14,7 @@ from bot.converters import Snowflake from bot.decorators import in_whitelist from bot.pagination import LinePaginator from bot.utils import messages +from bot.utils.checks import has_no_roles_check from bot.utils.time import time_since log = logging.getLogger(__name__) @@ -158,6 +159,9 @@ class Utils(Cog): @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) async def snowflake(self, ctx: Context, *snowflakes: Snowflake) -> None: """Get Discord snowflake creation time.""" + if len(snowflakes) > 1 and await has_no_roles_check(ctx, *STAFF_ROLES): + raise BadArgument("Cannot process more than one snowflake in one invocation.") + for snowflake in snowflakes: created_at = snowflake_time(snowflake) embed = Embed( -- cgit v1.2.3 From ecdffd57c5a51143706d4fdc129645901352abb6 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Mon, 8 Mar 2021 19:18:28 +0200 Subject: Don't mention watching anymore in talent pool add message --- bot/exts/moderation/watchchannels/talentpool.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index c2f6ab2c5..737ee684d 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -109,7 +109,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): resp.raise_for_status() self.watched_users[user.id] = response_data - msg = f":white_check_mark: Messages sent by {user} will now be relayed to the talent pool 
channel" + msg = f":white_check_mark: The nomination for {user} has been added to the talent pool" history = await self.bot.api_client.get( self.api_endpoint, -- cgit v1.2.3 From fa93d2fd8ed03fb991bf32573e67b49e89c56057 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Mon, 8 Mar 2021 19:21:16 +0200 Subject: Shorten reason of nomination string to 1000 characters --- bot/exts/moderation/watchchannels/talentpool.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index 737ee684d..49221002e 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -269,7 +269,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): actor_id = site_entry["actor"] actor = guild.get_member(actor_id) - reason = site_entry["reason"] or "*None*" + reason = textwrap.shorten(site_entry["reason"], 1000, placeholder="...") or "*None*" created = time.format_infraction(site_entry["inserted_at"]) entries.append( f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" -- cgit v1.2.3 From 7d3d3eaa6474902f120a94b885d4b4b789c2b87c Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Mon, 8 Mar 2021 19:33:34 +0200 Subject: Limit maximum characters for reasons to 1000 --- bot/exts/moderation/watchchannels/talentpool.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index 49221002e..d75688fa6 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -14,6 +14,8 @@ from bot.exts.moderation.watchchannels._watchchannel import WatchChannel from bot.pagination import LinePaginator from bot.utils import time +REASON_MAX_CHARS = 1000 + log = 
logging.getLogger(__name__) @@ -84,6 +86,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await ctx.send(f":x: Failed to update the user cache; can't add {user}") return + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + return + # Manual request with `raise_for_status` as False because we want the actual response session = self.bot.api_client.session url = self.bot.api_client._url_for(self.api_endpoint) @@ -162,6 +168,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): Providing a `reason` is required. """ + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + return + if await self.unwatch(user.id, reason): await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") else: @@ -177,6 +187,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @has_any_role(*MODERATION_ROLES) async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: """Edits the reason of a specific nominator in a specific active nomination.""" + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + return + try: nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") except ResponseCodeError as e: @@ -208,6 +222,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @has_any_role(*MODERATION_ROLES) async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: """Edits the unnominate reason for the nomination with the given `id`.""" + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + return + try: nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") except ResponseCodeError as 
e: @@ -269,7 +287,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): actor_id = site_entry["actor"] actor = guild.get_member(actor_id) - reason = textwrap.shorten(site_entry["reason"], 1000, placeholder="...") or "*None*" + reason = site_entry["reason"] or "*None*" created = time.format_infraction(site_entry["inserted_at"]) entries.append( f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" -- cgit v1.2.3 From 1ebc283ce03cd4beec562123a536bff58278e2ca Mon Sep 17 00:00:00 2001 From: Hassan Abouelela <47495861+HassanAbouelela@users.noreply.github.com> Date: Mon, 8 Mar 2021 20:55:19 +0300 Subject: Revert "Use JSON logging in production" --- Pipfile | 1 - Pipfile.lock | 260 ++++++++++++++++++++++------------------------------------- bot/log.py | 49 ++++------- 3 files changed, 114 insertions(+), 196 deletions(-) diff --git a/Pipfile b/Pipfile index 024aa6eff..0a94fb888 100644 --- a/Pipfile +++ b/Pipfile @@ -28,7 +28,6 @@ sphinx = "~=2.2" statsd = "~=3.3" arrow = "~=0.17" emoji = "~=0.6" -python-json-logger = "~=2.0" [dev-packages] coverage = "~=5.0" diff --git a/Pipfile.lock b/Pipfile.lock index dc7f6f21f..f8cedb08f 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "81ca9d1891e71de1c3f71958f082e1a8cad71e5b3ca425dc561d0ae74664fdb0" + "sha256": "228ae55fe5700ac3827ba6b661933b60b1d06f44fea8bcbe8c5a769fa10ab2fd" }, "pipfile-spec": 6, "requires": { @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:1d4305a5f78af3857310b4fe48348cdcf6c097e0e275ea88c2cd08570531a369", - "sha256:e69afef8695f47c5d107bbdba21bdb845d5c249acb3be53ef5c2d497b02657c0" + "sha256:9773440a89840941ac3099a7720bf9d51e8764a484066b82ede4d395660ff430", + "sha256:a8065be3c722eb8f9fff8c0e7590729e7782202cdb9363d9830d7d5d47b45c7c" ], "index": "pypi", - "version": "==6.8.0" + "version": "==6.7.1" }, "aiodns": { "hashes": [ @@ -96,7 +96,6 @@ 
"sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573", "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e" ], - "markers": "python_version >= '3.6'", "version": "==3.3.1" }, "alabaster": { @@ -123,7 +122,6 @@ "sha256:c25e4fff73f64d20645254783c3224a4c49e083e3fab67c44f17af944c5e26af" ], "index": "pypi", - "markers": "python_version ~= '3.7'", "version": "==0.1.4" }, "async-timeout": { @@ -131,7 +129,6 @@ "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3" ], - "markers": "python_full_version >= '3.5.3'", "version": "==3.0.1" }, "attrs": { @@ -139,7 +136,6 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "babel": { @@ -147,7 +143,6 @@ "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.9.0" }, "beautifulsoup4": { @@ -220,6 +215,7 @@ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], + "index": "pypi", "markers": "sys_platform == 'win32'", "version": "==0.4.4" }, @@ -252,7 +248,6 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "emoji": { @@ -335,7 +330,6 @@ "sha256:e64be68255234bb489a574c4f2f8df7029c98c81ec4d160d6cd836e7f0679390", "sha256:e82d6b930e02e80e5109b678c663a9ed210680ded81c1abaf54635d88d1da298" 
], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "humanfriendly": { @@ -343,7 +337,6 @@ "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d", "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==9.1" }, "idna": { @@ -351,7 +344,6 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -359,7 +351,6 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "jinja2": { @@ -367,7 +358,6 @@ "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.3" }, "lxml": { @@ -476,16 +466,15 @@ "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "more-itertools": { "hashes": [ - "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced", - "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713" + "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", + "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" ], "index": "pypi", - "version": "==8.7.0" + "version": 
"==8.6.0" }, "multidict": { "hashes": [ @@ -527,14 +516,12 @@ "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281", "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80" ], - "markers": "python_version >= '3.6'", "version": "==5.1.0" }, "ordered-set": { "hashes": [ "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95" ], - "markers": "python_version >= '3.5'", "version": "==4.0.2" }, "packaging": { @@ -542,7 +529,6 @@ "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.9" }, "pamqp": { @@ -591,7 +577,6 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { @@ -599,7 +584,6 @@ "sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0", "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88" ], - "markers": "python_version >= '3.5'", "version": "==2.8.0" }, "pyparsing": { @@ -607,7 +591,6 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "python-dateutil": { @@ -618,13 +601,6 @@ "index": "pypi", "version": "==2.8.1" }, - "python-json-logger": { - "hashes": [ - "sha256:f26eea7898db40609563bed0a7ca11af12e2a79858632706d835a0f961b7d398" - ], - "index": "pypi", - "version": "==2.0.1" - }, "pytz": { "hashes": [ "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", @@ -634,37 +610,28 @@ }, "pyyaml": { "hashes": [ - 
"sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", + "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", 
+ "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", - "version": "==5.4.1" + "version": "==5.3.1" }, "redis": { "hashes": [ "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2", "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, "requests": { @@ -677,18 +644,17 @@ }, "sentry-sdk": { "hashes": [ - "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237", - "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b" + "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0", + "sha256:737a094e49a529dd0fdcaafa9e97cf7c3d5eb964bd229821d640bc77f3502b3f" ], "index": "pypi", - "version": "==0.20.3" + "version": "==0.19.5" }, "six": { "hashes": [ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -726,7 +692,6 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -734,7 
+699,6 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], - "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -742,7 +706,6 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -750,7 +713,6 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], - "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -758,7 +720,6 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], - "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -766,7 +727,6 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], - "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "statsd": { @@ -790,7 +750,6 @@ "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.26.3" }, "yarl": { @@ -833,7 +792,6 @@ "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a", "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71" ], - "markers": "python_version >= '3.6'", "version": "==1.6.3" } }, @@ -850,7 +808,6 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", 
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "certifi": { @@ -865,7 +822,6 @@ "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d", "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1" ], - "markers": "python_full_version >= '3.6.1'", "version": "==3.2.0" }, "chardet": { @@ -877,61 +833,58 @@ }, "coverage": { "hashes": [ - "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c", - "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6", - "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45", - "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a", - "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03", - "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529", - "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a", - "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a", - "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2", - "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6", - "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759", - "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53", - "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a", - "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4", - "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff", - "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502", - "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793", - "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb", - "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905", - 
"sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821", - "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b", - "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81", - "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0", - "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b", - "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3", - "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184", - "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701", - "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a", - "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82", - "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638", - "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5", - "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083", - "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6", - "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90", - "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465", - "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a", - "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3", - "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e", - "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066", - "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf", - "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b", - "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae", - "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669", - "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873", - "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b", 
- "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6", - "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb", - "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160", - "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c", - "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079", - "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d", - "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6" + "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297", + "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1", + "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497", + "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606", + "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528", + "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b", + "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4", + "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830", + "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1", + "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f", + "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d", + "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3", + "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8", + "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500", + "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7", + "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb", + "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b", + "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059", + 
"sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b", + "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72", + "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36", + "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277", + "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c", + "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631", + "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff", + "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8", + "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec", + "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b", + "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7", + "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105", + "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b", + "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c", + "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b", + "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98", + "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4", + "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879", + "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f", + "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4", + "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044", + "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e", + "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899", + "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f", + "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448", + "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714", 
+ "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2", + "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d", + "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd", + "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7", + "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae" ], "index": "pypi", - "version": "==5.5" + "version": "==5.3.1" }, "coveralls": { "hashes": [ @@ -971,11 +924,11 @@ }, "flake8-annotations": { "hashes": [ - "sha256:8968ff12f296433028ad561c680ccc03a7cd62576d100c3f1475e058b3c11b43", - "sha256:bd0505616c0d85ebb45c6052d339c69f320d3f87fa079ab4e91a4f234a863d05" + "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055", + "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e" ], "index": "pypi", - "version": "==2.6.0" + "version": "==2.5.0" }, "flake8-bugbear": { "hashes": [ @@ -1033,18 +986,16 @@ }, "identify": { "hashes": [ - "sha256:2179e7359471ab55729f201b3fdf7dc2778e221f868410fedcb0987b791ba552", - "sha256:2a5fdf2f5319cc357eda2550bea713a404392495961022cf2462624ce62f0f46" + "sha256:de7129142a5c86d75a52b96f394d94d96d497881d2aaf8eafe320cdbe8ac4bcc", + "sha256:e0dae57c0397629ce13c289f6ddde0204edf518f557bfdb1e56474aa143e77c3" ], - "markers": "python_full_version >= '3.6.1'", - "version": "==2.1.0" + "version": "==1.5.14" }, "idna": { "hashes": [ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "mccabe": { @@ -1071,18 +1022,17 @@ }, "pre-commit": { "hashes": [ - "sha256:16212d1fde2bed88159287da88ff03796863854b04dc9f838a55979325a3d20e", - "sha256:399baf78f13f4de82a29b649afd74bef2c4e28eb4f021661fc7f29246e8c7a3a" + "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0", + 
"sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4" ], "index": "pypi", - "version": "==2.10.1" + "version": "==2.9.3" }, "pycodestyle": { "hashes": [ "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.6.0" }, "pydocstyle": { @@ -1090,7 +1040,6 @@ "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325", "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678" ], - "markers": "python_version >= '3.5'", "version": "==5.1.1" }, "pyflakes": { @@ -1098,35 +1047,26 @@ "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.2.0" }, "pyyaml": { "hashes": [ - "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - 
"sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", + "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", - "version": "==5.4.1" + "version": "==5.3.1" }, "requests": { "hashes": [ @@ -1141,7 +1081,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' 
and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -1156,7 +1095,6 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "urllib3": { @@ -1164,7 +1102,6 @@ "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.26.3" }, "virtualenv": { @@ -1172,7 +1109,6 @@ "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d", "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.4.2" } } diff --git a/bot/log.py b/bot/log.py index bc3bba0af..e92233a33 100644 --- a/bot/log.py +++ b/bot/log.py @@ -1,12 +1,11 @@ import logging import os import sys -from logging import Logger, StreamHandler, handlers +from logging import Logger, handlers from pathlib import Path import coloredlogs import sentry_sdk -from pythonjsonlogger import jsonlogger from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.redis import RedisIntegration @@ -14,15 +13,6 @@ from bot import constants TRACE_LEVEL = 5 -PROD_FIELDS = [ - "asctime", - "name", - "levelname", - "message", - "funcName", - "filename" -] - def setup() -> None: """Set up loggers.""" @@ -43,28 +33,21 @@ def setup() -> None: root_log.setLevel(log_level) root_log.addHandler(file_handler) - if constants.DEBUG_MODE: - if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: - coloredlogs.DEFAULT_LEVEL_STYLES = { - **coloredlogs.DEFAULT_LEVEL_STYLES, - "trace": {"color": 246}, - "critical": 
{"background": "red"}, - "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] - } - - if "COLOREDLOGS_LOG_FORMAT" not in os.environ: - coloredlogs.DEFAULT_LOG_FORMAT = format_string - - if "COLOREDLOGS_LOG_LEVEL" not in os.environ: - coloredlogs.DEFAULT_LOG_LEVEL = log_level - - coloredlogs.install(logger=root_log, stream=sys.stdout) - else: - json_format = " ".join([f"%({field})s" for field in PROD_FIELDS]) - stream_handler = StreamHandler() - formatter = jsonlogger.JsonFormatter(json_format) - stream_handler.setFormatter(formatter) - root_log.addHandler(stream_handler) + if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: + coloredlogs.DEFAULT_LEVEL_STYLES = { + **coloredlogs.DEFAULT_LEVEL_STYLES, + "trace": {"color": 246}, + "critical": {"background": "red"}, + "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] + } + + if "COLOREDLOGS_LOG_FORMAT" not in os.environ: + coloredlogs.DEFAULT_LOG_FORMAT = format_string + + if "COLOREDLOGS_LOG_LEVEL" not in os.environ: + coloredlogs.DEFAULT_LOG_LEVEL = log_level + + coloredlogs.install(logger=root_log, stream=sys.stdout) logging.getLogger("discord").setLevel(logging.WARNING) logging.getLogger("websockets").setLevel(logging.WARNING) -- cgit v1.2.3 From bf5efea00f7409e46c5add14bb01c983ff849f2e Mon Sep 17 00:00:00 2001 From: xithrius Date: Mon, 8 Mar 2021 11:56:05 -0800 Subject: Resolving KeyError on infractions that don't purge. 
--- bot/exts/moderation/infraction/_scheduler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py index b48c1c19e..988fb7220 100644 --- a/bot/exts/moderation/infraction/_scheduler.py +++ b/bot/exts/moderation/infraction/_scheduler.py @@ -173,7 +173,7 @@ class InfractionScheduler: total = len(infractions) end_msg = f" (#{id_} ; {total} infraction{ngettext('', 's', total)} total)" - purge = infraction['purge'] + purge = infraction.get("purge", "") # Execute the necessary actions to apply the infraction on Discord. if action_coro: -- cgit v1.2.3 From 1c4e4387abc3e9cc9f4320457c5d6456cdce7a3f Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Tue, 9 Mar 2021 13:17:32 +0000 Subject: DevOps team reviews for bot deployments --- .github/workflows/deploy.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 5a4aede30..0caf02308 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -10,6 +10,7 @@ on: jobs: build: + environment: production if: github.event.workflow_run.conclusion == 'success' name: Build & Push runs-on: ubuntu-latest -- cgit v1.2.3 From b7d3419599b503198443dbef04ea9fd1d445108c Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 9 Mar 2021 15:06:24 +0100 Subject: Fix typo in stars.json Please have a bit of respect to the baguette land. Also this is a good way to test the new deploy approval system. 
--- bot/resources/stars.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/stars.json b/bot/resources/stars.json index c0b253120..5ecad0213 100644 --- a/bot/resources/stars.json +++ b/bot/resources/stars.json @@ -17,7 +17,7 @@ "Bruce Springsteen", "Bruno Mars", "Bryan Adams", - "Celine Dion", + "Céline Dion", "Cher", "Christina Aguilera", "David Bowie", -- cgit v1.2.3 From 5a8cbaac5a91bfa83a4971961b87cf676b555f50 Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Tue, 9 Mar 2021 14:13:06 +0000 Subject: Delete repo specific FUNDING.yml file in favour of org one in python-discord/.github --- .github/FUNDING.yml | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index 6d9919ef2..000000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,2 +0,0 @@ -patreon: python_discord -custom: https://www.redbubble.com/people/pythondiscord -- cgit v1.2.3 From e61a5216bd19adcbc689fe2f18f969b94ce72e8f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 10 Mar 2021 04:04:14 +0100 Subject: Hold the symbol_get_event in the entire body of create_symbol_embed While the previous code was safe, the synchronization was spread out over different modules and was hard to wrap around. Additionally changes could introduce context switches without the author being aware of them causing potential race conditions with the refresh. 
Moving the whole body into the with block solves both of these issues --- bot/exts/info/doc/_cog.py | 48 +++++++++++++++++++++++------------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index fb45d0bbb..24b571ddb 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -251,8 +251,7 @@ class DocCog(commands.Cog): First a redis lookup is attempted, if that fails the `item_fetcher` is used to fetch the page and parse the HTML from it into Markdown. """ - with self.symbol_get_event: - markdown = await doc_cache.get(doc_item) + markdown = await doc_cache.get(doc_item) if markdown is None: log.debug(f"Redis cache miss with {doc_item}.") @@ -283,29 +282,30 @@ class DocCog(commands.Cog): if not self.refresh_event.is_set(): log.debug("Waiting for inventories to be refreshed before processing item.") await self.refresh_event.wait() + # Ensure a refresh can't run in case of a context switch until the with block is exited + with self.symbol_get_event: + doc_item = self.get_symbol_item(symbol_name) + if doc_item is None: + log.debug("Symbol does not exist.") + return None + + self.bot.stats.incr(f"doc_fetches.{doc_item.package}") + + # Show all symbols with the same name that were renamed in the footer, + # with a max of 100 chars. + if symbol_name in self.renamed_symbols: + renamed_symbols = ', '.join(self.renamed_symbols[symbol_name]) + footer_text = textwrap.shorten("Moved: " + renamed_symbols, 100, placeholder=' ...') + else: + footer_text = "" - doc_item = self.get_symbol_item(symbol_name) - if doc_item is None: - log.debug("Symbol does not exist.") - return None - - self.bot.stats.incr(f"doc_fetches.{doc_item.package}") - - # Show all symbols with the same name that were renamed in the footer, - # with a max of 100 chars. 
- if symbol_name in self.renamed_symbols: - renamed_symbols = ', '.join(self.renamed_symbols[symbol_name]) - footer_text = textwrap.shorten("Moved: " + renamed_symbols, 100, placeholder=' ...') - else: - footer_text = "" - - embed = discord.Embed( - title=discord.utils.escape_markdown(symbol_name), - url=f"{doc_item.url}#{doc_item.symbol_id}", - description=await self.get_symbol_markdown(doc_item) - ) - embed.set_footer(text=footer_text) - return embed + embed = discord.Embed( + title=discord.utils.escape_markdown(symbol_name), + url=f"{doc_item.url}#{doc_item.symbol_id}", + description=await self.get_symbol_markdown(doc_item) + ) + embed.set_footer(text=footer_text) + return embed @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None: -- cgit v1.2.3 From 522ed426f08845f3843aa3f60284205d1e36dfe8 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Wed, 10 Mar 2021 04:35:09 +0100 Subject: Use a clearer approach with less duplicate code Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 81 ++++++++++++++++++++--------------------------- 1 file changed, 35 insertions(+), 46 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 24b571ddb..9e41c6f1e 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -102,15 +102,11 @@ class DocCog(commands.Cog): # e.g. 
get 'class' from 'py:class' group_name = group.split(":")[1] - if (original_item := self.doc_symbols.get(symbol_name)) is not None: - replaced_symbol_name = self.ensure_unique_symbol_name( - package_name, - group_name, - original_item, - symbol_name, - ) - if replaced_symbol_name is not None: - symbol_name = replaced_symbol_name + symbol_name = self.ensure_unique_symbol_name( + package_name, + group_name, + symbol_name, + ) relative_url_path, _, symbol_id = relative_doc_url.partition("#") # Intern fields that have shared content so we're not storing unique strings for every object @@ -155,59 +151,52 @@ class DocCog(commands.Cog): else: self.update_single(api_package_name, base_url, package) - def ensure_unique_symbol_name( - self, - package_name: str, - group_name: str, - original_item: DocItem, - symbol_name: str, - ) -> Optional[str]: + def ensure_unique_symbol_name(self, package_name: str, group_name: str, symbol_name: str) -> str: """ Ensure `symbol_name` doesn't overwrite an another symbol in `doc_symbols`. - Should only be called with symbol names that already have a conflict in `doc_symbols`. + For conflicts, rename either the current symbol or the existing symbol with which it conflicts. + Store the new name in `renamed_symbols` and return the name to use for the symbol. - If None is returned, space was created for `symbol_name` in `doc_symbols` instead of - the symbol name being changed. + If the existing symbol was renamed or there was no conflict, the returned name is equivalent to `symbol_name`. """ + if (item := self.doc_symbols.get(symbol_name)) is None: + return symbol_name # There's no conflict so it's fine to simply use the given symbol name. + + def rename(prefix: str, *, rename_extant: bool = False) -> str: + new_name = f"{prefix}.{symbol_name}" + if new_name in self.doc_symbols: + # If there's still a conflict, qualify the name further. 
+ if rename_extant: + new_name = f"{item.package}.{item.group}.{symbol_name}" + else: + new_name = f"{package_name}.{group_name}.{symbol_name}" + + self.renamed_symbols[symbol_name].append(new_name) + + if rename_extant: + # Instead of renaming the current symbol, rename the symbol with which it conflicts. + self.doc_symbols[new_name] = self.doc_symbols[symbol_name] + return symbol_name + else: + return new_name + # Certain groups are added as prefixes to disambiguate the symbols. if group_name in FORCE_PREFIX_GROUPS: - new_symbol_name = f"{group_name}.{symbol_name}" - if new_symbol_name in self.doc_symbols: - # If there's still a conflict, prefix with package name. - new_symbol_name = f"{package_name}.{new_symbol_name}" - self.renamed_symbols[symbol_name].append(new_symbol_name) - return new_symbol_name + return rename(group_name) # The existing symbol with which the current symbol conflicts should have a group prefix. # It currently doesn't have the group prefix because it's only added once there's a conflict. - elif (original_symbol_group := original_item.group) in FORCE_PREFIX_GROUPS: - overridden_symbol_name = f"{original_symbol_group}.{symbol_name}" - if overridden_symbol_name in self.doc_symbols: - # If there's still a conflict, prefix with package name. - overridden_symbol_name = f"{original_item.package}.{overridden_symbol_name}" - - self.doc_symbols[overridden_symbol_name] = original_item - self.renamed_symbols[symbol_name].append(overridden_symbol_name) + elif item.group in FORCE_PREFIX_GROUPS: + return rename(item.group, rename_extant=True) elif package_name in PRIORITY_PACKAGES: - overridden_symbol_name = f"{original_item.package}.{symbol_name}" - if overridden_symbol_name in self.doc_symbols: - # If there's still a conflict, add the symbol's group in the middle. 
- overridden_symbol_name = f"{original_item.package}.{original_item.group}.{symbol_name}" - - self.doc_symbols[overridden_symbol_name] = original_item - self.renamed_symbols[symbol_name].append(overridden_symbol_name) + return rename(item.package, rename_extant=True) # If we can't specially handle the symbol through its group or package, # fall back to prepending its package name to the front. else: - new_symbol_name = f"{package_name}.{symbol_name}" - if new_symbol_name in self.doc_symbols: - # If there's still a conflict, add the symbol's group in the middle. - new_symbol_name = f"{package_name}.{group_name}.{symbol_name}" - self.renamed_symbols[symbol_name].append(new_symbol_name) - return new_symbol_name + return rename(package_name) async def refresh_inventories(self) -> None: """Refresh internal documentation inventories.""" -- cgit v1.2.3 From 6f880bbc40049948f71af14723f46533fb8c4f1f Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Wed, 10 Mar 2021 20:06:18 +0200 Subject: Moved talentpool to a new recruitment extension --- bot/exts/moderation/watchchannels/talentpool.py | 335 ------------------------ bot/exts/recruitment/talentpool/talentpool.py | 335 ++++++++++++++++++++++++ 2 files changed, 335 insertions(+), 335 deletions(-) delete mode 100644 bot/exts/moderation/watchchannels/talentpool.py create mode 100644 bot/exts/recruitment/talentpool/talentpool.py diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py deleted file mode 100644 index d75688fa6..000000000 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ /dev/null @@ -1,335 +0,0 @@ -import logging -import textwrap -from collections import ChainMap -from typing import Union - -from discord import Color, Embed, Member, User -from discord.ext.commands import Cog, Context, group, has_any_role - -from bot.api import ResponseCodeError -from bot.bot import Bot -from bot.constants import Channels, Guild, MODERATION_ROLES, 
STAFF_ROLES, Webhooks -from bot.converters import FetchedMember -from bot.exts.moderation.watchchannels._watchchannel import WatchChannel -from bot.pagination import LinePaginator -from bot.utils import time - -REASON_MAX_CHARS = 1000 - -log = logging.getLogger(__name__) - - -class TalentPool(WatchChannel, Cog, name="Talentpool"): - """Relays messages of helper candidates to a watch channel to observe them.""" - - def __init__(self, bot: Bot) -> None: - super().__init__( - bot, - destination=Channels.talent_pool, - webhook_id=Webhooks.talent_pool, - api_endpoint='bot/nominations', - api_default_params={'active': 'true', 'ordering': '-inserted_at'}, - logger=log, - disable_header=True, - ) - - @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) - async def nomination_group(self, ctx: Context) -> None: - """Highlights the activity of helper nominees by relaying their messages to the talent pool channel.""" - await ctx.send_help(ctx.command) - - @nomination_group.command(name='watched', aliases=('all', 'list'), root_aliases=("nominees",)) - @has_any_role(*MODERATION_ROLES) - async def watched_command( - self, ctx: Context, oldest_first: bool = False, update_cache: bool = True - ) -> None: - """ - Shows the users that are currently being monitored in the talent pool. - - The optional kwarg `oldest_first` can be used to order the list by oldest nomination. - - The optional kwarg `update_cache` can be used to update the user - cache using the API before listing the users. - """ - await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) - - @nomination_group.command(name='oldest') - @has_any_role(*MODERATION_ROLES) - async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: - """ - Shows talent pool monitored users ordered by oldest nomination. 
- - The optional kwarg `update_cache` can be used to update the user - cache using the API before listing the users. - """ - await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) - - @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) - @has_any_role(*STAFF_ROLES) - async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: - """ - Relay messages sent by the given `user` to the `#talent-pool` channel. - - A `reason` for adding the user to the talent pool is optional. - If given, it will be displayed in the header when relaying messages of this user to the channel. - """ - if user.bot: - await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.") - return - - if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles): - await ctx.send(":x: Nominating staff members, eh? Here's a cookie :cookie:") - return - - if not await self.fetch_user_cache(): - await ctx.send(f":x: Failed to update the user cache; can't add {user}") - return - - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") - return - - # Manual request with `raise_for_status` as False because we want the actual response - session = self.bot.api_client.session - url = self.bot.api_client._url_for(self.api_endpoint) - kwargs = { - 'json': { - 'actor': ctx.author.id, - 'reason': reason, - 'user': user.id - }, - 'raise_for_status': False, - } - async with session.post(url, **kwargs) as resp: - response_data = await resp.json() - - if resp.status == 400: - if response_data.get('user', False): - await ctx.send(":x: The specified user can't be found in the database tables") - elif response_data.get('actor', False): - await ctx.send(":x: You have already nominated this user") - - return - else: - resp.raise_for_status() - - self.watched_users[user.id] = response_data - msg = 
f":white_check_mark: The nomination for {user} has been added to the talent pool" - - history = await self.bot.api_client.get( - self.api_endpoint, - params={ - "user__id": str(user.id), - "active": "false", - "ordering": "-inserted_at" - } - ) - - if history: - msg += f"\n\n({len(history)} previous nominations in total)" - - await ctx.send(msg) - - @nomination_group.command(name='history', aliases=('info', 'search')) - @has_any_role(*MODERATION_ROLES) - async def history_command(self, ctx: Context, user: FetchedMember) -> None: - """Shows the specified user's nomination history.""" - result = await self.bot.api_client.get( - self.api_endpoint, - params={ - 'user__id': str(user.id), - 'ordering': "-active,-inserted_at" - } - ) - if not result: - await ctx.send(":warning: This user has never been nominated") - return - - embed = Embed( - title=f"Nominations for {user.display_name} `({user.id})`", - color=Color.blue() - ) - lines = [self._nomination_to_string(nomination) for nomination in result] - await LinePaginator.paginate( - lines, - ctx=ctx, - embed=embed, - empty=True, - max_lines=3, - max_size=1000 - ) - - @nomination_group.command(name='unwatch', aliases=('end', ), root_aliases=("unnominate",)) - @has_any_role(*MODERATION_ROLES) - async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: - """ - Ends the active nomination of the specified user with the given reason. - - Providing a `reason` is required. 
- """ - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") - return - - if await self.unwatch(user.id, reason): - await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") - else: - await ctx.send(":x: The specified user does not have an active nomination") - - @nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) - async def nomination_edit_group(self, ctx: Context) -> None: - """Commands to edit nominations.""" - await ctx.send_help(ctx.command) - - @nomination_edit_group.command(name='reason') - @has_any_role(*MODERATION_ROLES) - async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: - """Edits the reason of a specific nominator in a specific active nomination.""" - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") - return - - try: - nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") - except ResponseCodeError as e: - if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") - await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") - return - else: - raise - - if not nomination["active"]: - await ctx.send(":x: Can't edit the reason of an inactive nomination.") - return - - if not any(entry["actor"] == actor.id for entry in nomination["entries"]): - await ctx.send(f":x: {actor} doesn't have an entry in this nomination.") - return - - self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}") - - await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination_id}", - json={"actor": actor.id, "reason": reason} - ) - await self.fetch_user_cache() # Update cache - await ctx.send(":white_check_mark: 
Successfully updated nomination reason.") - - @nomination_edit_group.command(name='end_reason') - @has_any_role(*MODERATION_ROLES) - async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: - """Edits the unnominate reason for the nomination with the given `id`.""" - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") - return - - try: - nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") - except ResponseCodeError as e: - if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") - await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") - return - else: - raise - - if nomination["active"]: - await ctx.send(":x: Can't edit the end reason of an active nomination.") - return - - self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}") - - await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination_id}", - json={"end_reason": reason} - ) - await self.fetch_user_cache() # Update cache. 
- await ctx.send(":white_check_mark: Updated the end reason of the nomination!") - - @Cog.listener() - async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: - """Remove `user` from the talent pool after they are banned.""" - await self.unwatch(user.id, "User was banned.") - - async def unwatch(self, user_id: int, reason: str) -> bool: - """End the active nomination of a user with the given reason and return True on success.""" - active_nomination = await self.bot.api_client.get( - self.api_endpoint, - params=ChainMap( - {"user__id": str(user_id)}, - self.api_default_params, - ) - ) - - if not active_nomination: - log.debug(f"No active nominate exists for {user_id=}") - return False - - log.info(f"Ending nomination: {user_id=} {reason=}") - - nomination = active_nomination[0] - await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination['id']}", - json={'end_reason': reason, 'active': False} - ) - self._remove_user(user_id) - - return True - - def _nomination_to_string(self, nomination_object: dict) -> str: - """Creates a string representation of a nomination.""" - guild = self.bot.get_guild(Guild.id) - entries = [] - for site_entry in nomination_object["entries"]: - actor_id = site_entry["actor"] - actor = guild.get_member(actor_id) - - reason = site_entry["reason"] or "*None*" - created = time.format_infraction(site_entry["inserted_at"]) - entries.append( - f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" - ) - - entries_string = "\n\n".join(entries) - - active = nomination_object["active"] - - start_date = time.format_infraction(nomination_object["inserted_at"]) - if active: - lines = textwrap.dedent( - f""" - =============== - Status: **Active** - Date: {start_date} - Nomination ID: `{nomination_object["id"]}` - - {entries_string} - =============== - """ - ) - else: - end_date = time.format_infraction(nomination_object["ended_at"]) - lines = textwrap.dedent( - f""" - =============== - 
Status: Inactive - Date: {start_date} - Nomination ID: `{nomination_object["id"]}` - - {entries_string} - - End date: {end_date} - Unwatch reason: {nomination_object["end_reason"]} - =============== - """ - ) - - return lines.strip() - - -def setup(bot: Bot) -> None: - """Load the TalentPool cog.""" - bot.add_cog(TalentPool(bot)) diff --git a/bot/exts/recruitment/talentpool/talentpool.py b/bot/exts/recruitment/talentpool/talentpool.py new file mode 100644 index 000000000..d75688fa6 --- /dev/null +++ b/bot/exts/recruitment/talentpool/talentpool.py @@ -0,0 +1,335 @@ +import logging +import textwrap +from collections import ChainMap +from typing import Union + +from discord import Color, Embed, Member, User +from discord.ext.commands import Cog, Context, group, has_any_role + +from bot.api import ResponseCodeError +from bot.bot import Bot +from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks +from bot.converters import FetchedMember +from bot.exts.moderation.watchchannels._watchchannel import WatchChannel +from bot.pagination import LinePaginator +from bot.utils import time + +REASON_MAX_CHARS = 1000 + +log = logging.getLogger(__name__) + + +class TalentPool(WatchChannel, Cog, name="Talentpool"): + """Relays messages of helper candidates to a watch channel to observe them.""" + + def __init__(self, bot: Bot) -> None: + super().__init__( + bot, + destination=Channels.talent_pool, + webhook_id=Webhooks.talent_pool, + api_endpoint='bot/nominations', + api_default_params={'active': 'true', 'ordering': '-inserted_at'}, + logger=log, + disable_header=True, + ) + + @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) + async def nomination_group(self, ctx: Context) -> None: + """Highlights the activity of helper nominees by relaying their messages to the talent pool channel.""" + await ctx.send_help(ctx.command) + + @nomination_group.command(name='watched', 
aliases=('all', 'list'), root_aliases=("nominees",)) + @has_any_role(*MODERATION_ROLES) + async def watched_command( + self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + ) -> None: + """ + Shows the users that are currently being monitored in the talent pool. + + The optional kwarg `oldest_first` can be used to order the list by oldest nomination. + + The optional kwarg `update_cache` can be used to update the user + cache using the API before listing the users. + """ + await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + + @nomination_group.command(name='oldest') + @has_any_role(*MODERATION_ROLES) + async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: + """ + Shows talent pool monitored users ordered by oldest nomination. + + The optional kwarg `update_cache` can be used to update the user + cache using the API before listing the users. + """ + await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) + + @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) + @has_any_role(*STAFF_ROLES) + async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: + """ + Relay messages sent by the given `user` to the `#talent-pool` channel. + + A `reason` for adding the user to the talent pool is optional. + If given, it will be displayed in the header when relaying messages of this user to the channel. + """ + if user.bot: + await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.") + return + + if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles): + await ctx.send(":x: Nominating staff members, eh? 
Here's a cookie :cookie:") + return + + if not await self.fetch_user_cache(): + await ctx.send(f":x: Failed to update the user cache; can't add {user}") + return + + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + return + + # Manual request with `raise_for_status` as False because we want the actual response + session = self.bot.api_client.session + url = self.bot.api_client._url_for(self.api_endpoint) + kwargs = { + 'json': { + 'actor': ctx.author.id, + 'reason': reason, + 'user': user.id + }, + 'raise_for_status': False, + } + async with session.post(url, **kwargs) as resp: + response_data = await resp.json() + + if resp.status == 400: + if response_data.get('user', False): + await ctx.send(":x: The specified user can't be found in the database tables") + elif response_data.get('actor', False): + await ctx.send(":x: You have already nominated this user") + + return + else: + resp.raise_for_status() + + self.watched_users[user.id] = response_data + msg = f":white_check_mark: The nomination for {user} has been added to the talent pool" + + history = await self.bot.api_client.get( + self.api_endpoint, + params={ + "user__id": str(user.id), + "active": "false", + "ordering": "-inserted_at" + } + ) + + if history: + msg += f"\n\n({len(history)} previous nominations in total)" + + await ctx.send(msg) + + @nomination_group.command(name='history', aliases=('info', 'search')) + @has_any_role(*MODERATION_ROLES) + async def history_command(self, ctx: Context, user: FetchedMember) -> None: + """Shows the specified user's nomination history.""" + result = await self.bot.api_client.get( + self.api_endpoint, + params={ + 'user__id': str(user.id), + 'ordering': "-active,-inserted_at" + } + ) + if not result: + await ctx.send(":warning: This user has never been nominated") + return + + embed = Embed( + title=f"Nominations for {user.display_name} `({user.id})`", + color=Color.blue() + ) + lines = 
[self._nomination_to_string(nomination) for nomination in result] + await LinePaginator.paginate( + lines, + ctx=ctx, + embed=embed, + empty=True, + max_lines=3, + max_size=1000 + ) + + @nomination_group.command(name='unwatch', aliases=('end', ), root_aliases=("unnominate",)) + @has_any_role(*MODERATION_ROLES) + async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: + """ + Ends the active nomination of the specified user with the given reason. + + Providing a `reason` is required. + """ + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + return + + if await self.unwatch(user.id, reason): + await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") + else: + await ctx.send(":x: The specified user does not have an active nomination") + + @nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) + async def nomination_edit_group(self, ctx: Context) -> None: + """Commands to edit nominations.""" + await ctx.send_help(ctx.command) + + @nomination_edit_group.command(name='reason') + @has_any_role(*MODERATION_ROLES) + async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: + """Edits the reason of a specific nominator in a specific active nomination.""" + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + return + + try: + nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") + except ResponseCodeError as e: + if e.response.status == 404: + self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") + await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") + return + else: + raise + + if not nomination["active"]: + await ctx.send(":x: Can't edit the reason of 
an inactive nomination.") + return + + if not any(entry["actor"] == actor.id for entry in nomination["entries"]): + await ctx.send(f":x: {actor} doesn't have an entry in this nomination.") + return + + self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}") + + await self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination_id}", + json={"actor": actor.id, "reason": reason} + ) + await self.fetch_user_cache() # Update cache + await ctx.send(":white_check_mark: Successfully updated nomination reason.") + + @nomination_edit_group.command(name='end_reason') + @has_any_role(*MODERATION_ROLES) + async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: + """Edits the unnominate reason for the nomination with the given `id`.""" + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + return + + try: + nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") + except ResponseCodeError as e: + if e.response.status == 404: + self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") + await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") + return + else: + raise + + if nomination["active"]: + await ctx.send(":x: Can't edit the end reason of an active nomination.") + return + + self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}") + + await self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination_id}", + json={"end_reason": reason} + ) + await self.fetch_user_cache() # Update cache. 
+ await ctx.send(":white_check_mark: Updated the end reason of the nomination!") + + @Cog.listener() + async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: + """Remove `user` from the talent pool after they are banned.""" + await self.unwatch(user.id, "User was banned.") + + async def unwatch(self, user_id: int, reason: str) -> bool: + """End the active nomination of a user with the given reason and return True on success.""" + active_nomination = await self.bot.api_client.get( + self.api_endpoint, + params=ChainMap( + {"user__id": str(user_id)}, + self.api_default_params, + ) + ) + + if not active_nomination: + log.debug(f"No active nominate exists for {user_id=}") + return False + + log.info(f"Ending nomination: {user_id=} {reason=}") + + nomination = active_nomination[0] + await self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination['id']}", + json={'end_reason': reason, 'active': False} + ) + self._remove_user(user_id) + + return True + + def _nomination_to_string(self, nomination_object: dict) -> str: + """Creates a string representation of a nomination.""" + guild = self.bot.get_guild(Guild.id) + entries = [] + for site_entry in nomination_object["entries"]: + actor_id = site_entry["actor"] + actor = guild.get_member(actor_id) + + reason = site_entry["reason"] or "*None*" + created = time.format_infraction(site_entry["inserted_at"]) + entries.append( + f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" + ) + + entries_string = "\n\n".join(entries) + + active = nomination_object["active"] + + start_date = time.format_infraction(nomination_object["inserted_at"]) + if active: + lines = textwrap.dedent( + f""" + =============== + Status: **Active** + Date: {start_date} + Nomination ID: `{nomination_object["id"]}` + + {entries_string} + =============== + """ + ) + else: + end_date = time.format_infraction(nomination_object["ended_at"]) + lines = textwrap.dedent( + f""" + =============== + 
Status: Inactive + Date: {start_date} + Nomination ID: `{nomination_object["id"]}` + + {entries_string} + + End date: {end_date} + Unwatch reason: {nomination_object["end_reason"]} + =============== + """ + ) + + return lines.strip() + + +def setup(bot: Bot) -> None: + """Load the TalentPool cog.""" + bot.add_cog(TalentPool(bot)) -- cgit v1.2.3 From 65f93df5388e4c90ddbc985305d14d5120b24863 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Wed, 10 Mar 2021 20:13:18 +0200 Subject: Rename talentpool.py to _cog.py This change is done in preparation to having the cog split across multiple files. --- bot/exts/recruitment/talentpool/_cog.py | 335 ++++++++++++++++++++++++++ bot/exts/recruitment/talentpool/talentpool.py | 335 -------------------------- 2 files changed, 335 insertions(+), 335 deletions(-) create mode 100644 bot/exts/recruitment/talentpool/_cog.py delete mode 100644 bot/exts/recruitment/talentpool/talentpool.py diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py new file mode 100644 index 000000000..d75688fa6 --- /dev/null +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -0,0 +1,335 @@ +import logging +import textwrap +from collections import ChainMap +from typing import Union + +from discord import Color, Embed, Member, User +from discord.ext.commands import Cog, Context, group, has_any_role + +from bot.api import ResponseCodeError +from bot.bot import Bot +from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks +from bot.converters import FetchedMember +from bot.exts.moderation.watchchannels._watchchannel import WatchChannel +from bot.pagination import LinePaginator +from bot.utils import time + +REASON_MAX_CHARS = 1000 + +log = logging.getLogger(__name__) + + +class TalentPool(WatchChannel, Cog, name="Talentpool"): + """Relays messages of helper candidates to a watch channel to observe them.""" + + def __init__(self, bot: Bot) -> None: + super().__init__( + bot, + 
destination=Channels.talent_pool, + webhook_id=Webhooks.talent_pool, + api_endpoint='bot/nominations', + api_default_params={'active': 'true', 'ordering': '-inserted_at'}, + logger=log, + disable_header=True, + ) + + @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) + async def nomination_group(self, ctx: Context) -> None: + """Highlights the activity of helper nominees by relaying their messages to the talent pool channel.""" + await ctx.send_help(ctx.command) + + @nomination_group.command(name='watched', aliases=('all', 'list'), root_aliases=("nominees",)) + @has_any_role(*MODERATION_ROLES) + async def watched_command( + self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + ) -> None: + """ + Shows the users that are currently being monitored in the talent pool. + + The optional kwarg `oldest_first` can be used to order the list by oldest nomination. + + The optional kwarg `update_cache` can be used to update the user + cache using the API before listing the users. + """ + await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + + @nomination_group.command(name='oldest') + @has_any_role(*MODERATION_ROLES) + async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: + """ + Shows talent pool monitored users ordered by oldest nomination. + + The optional kwarg `update_cache` can be used to update the user + cache using the API before listing the users. + """ + await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) + + @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) + @has_any_role(*STAFF_ROLES) + async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: + """ + Relay messages sent by the given `user` to the `#talent-pool` channel. 
+ + A `reason` for adding the user to the talent pool is optional. + If given, it will be displayed in the header when relaying messages of this user to the channel. + """ + if user.bot: + await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.") + return + + if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles): + await ctx.send(":x: Nominating staff members, eh? Here's a cookie :cookie:") + return + + if not await self.fetch_user_cache(): + await ctx.send(f":x: Failed to update the user cache; can't add {user}") + return + + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + return + + # Manual request with `raise_for_status` as False because we want the actual response + session = self.bot.api_client.session + url = self.bot.api_client._url_for(self.api_endpoint) + kwargs = { + 'json': { + 'actor': ctx.author.id, + 'reason': reason, + 'user': user.id + }, + 'raise_for_status': False, + } + async with session.post(url, **kwargs) as resp: + response_data = await resp.json() + + if resp.status == 400: + if response_data.get('user', False): + await ctx.send(":x: The specified user can't be found in the database tables") + elif response_data.get('actor', False): + await ctx.send(":x: You have already nominated this user") + + return + else: + resp.raise_for_status() + + self.watched_users[user.id] = response_data + msg = f":white_check_mark: The nomination for {user} has been added to the talent pool" + + history = await self.bot.api_client.get( + self.api_endpoint, + params={ + "user__id": str(user.id), + "active": "false", + "ordering": "-inserted_at" + } + ) + + if history: + msg += f"\n\n({len(history)} previous nominations in total)" + + await ctx.send(msg) + + @nomination_group.command(name='history', aliases=('info', 'search')) + @has_any_role(*MODERATION_ROLES) + async def history_command(self, ctx: Context, user: 
FetchedMember) -> None: + """Shows the specified user's nomination history.""" + result = await self.bot.api_client.get( + self.api_endpoint, + params={ + 'user__id': str(user.id), + 'ordering': "-active,-inserted_at" + } + ) + if not result: + await ctx.send(":warning: This user has never been nominated") + return + + embed = Embed( + title=f"Nominations for {user.display_name} `({user.id})`", + color=Color.blue() + ) + lines = [self._nomination_to_string(nomination) for nomination in result] + await LinePaginator.paginate( + lines, + ctx=ctx, + embed=embed, + empty=True, + max_lines=3, + max_size=1000 + ) + + @nomination_group.command(name='unwatch', aliases=('end', ), root_aliases=("unnominate",)) + @has_any_role(*MODERATION_ROLES) + async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: + """ + Ends the active nomination of the specified user with the given reason. + + Providing a `reason` is required. + """ + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + return + + if await self.unwatch(user.id, reason): + await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") + else: + await ctx.send(":x: The specified user does not have an active nomination") + + @nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) + async def nomination_edit_group(self, ctx: Context) -> None: + """Commands to edit nominations.""" + await ctx.send_help(ctx.command) + + @nomination_edit_group.command(name='reason') + @has_any_role(*MODERATION_ROLES) + async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: + """Edits the reason of a specific nominator in a specific active nomination.""" + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + 
return + + try: + nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") + except ResponseCodeError as e: + if e.response.status == 404: + self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") + await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") + return + else: + raise + + if not nomination["active"]: + await ctx.send(":x: Can't edit the reason of an inactive nomination.") + return + + if not any(entry["actor"] == actor.id for entry in nomination["entries"]): + await ctx.send(f":x: {actor} doesn't have an entry in this nomination.") + return + + self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}") + + await self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination_id}", + json={"actor": actor.id, "reason": reason} + ) + await self.fetch_user_cache() # Update cache + await ctx.send(":white_check_mark: Successfully updated nomination reason.") + + @nomination_edit_group.command(name='end_reason') + @has_any_role(*MODERATION_ROLES) + async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: + """Edits the unnominate reason for the nomination with the given `id`.""" + if len(reason) > REASON_MAX_CHARS: + await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + return + + try: + nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") + except ResponseCodeError as e: + if e.response.status == 404: + self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") + await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") + return + else: + raise + + if nomination["active"]: + await ctx.send(":x: Can't edit the end reason of an active nomination.") + return + + self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}") + + await 
self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination_id}", + json={"end_reason": reason} + ) + await self.fetch_user_cache() # Update cache. + await ctx.send(":white_check_mark: Updated the end reason of the nomination!") + + @Cog.listener() + async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: + """Remove `user` from the talent pool after they are banned.""" + await self.unwatch(user.id, "User was banned.") + + async def unwatch(self, user_id: int, reason: str) -> bool: + """End the active nomination of a user with the given reason and return True on success.""" + active_nomination = await self.bot.api_client.get( + self.api_endpoint, + params=ChainMap( + {"user__id": str(user_id)}, + self.api_default_params, + ) + ) + + if not active_nomination: + log.debug(f"No active nominate exists for {user_id=}") + return False + + log.info(f"Ending nomination: {user_id=} {reason=}") + + nomination = active_nomination[0] + await self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination['id']}", + json={'end_reason': reason, 'active': False} + ) + self._remove_user(user_id) + + return True + + def _nomination_to_string(self, nomination_object: dict) -> str: + """Creates a string representation of a nomination.""" + guild = self.bot.get_guild(Guild.id) + entries = [] + for site_entry in nomination_object["entries"]: + actor_id = site_entry["actor"] + actor = guild.get_member(actor_id) + + reason = site_entry["reason"] or "*None*" + created = time.format_infraction(site_entry["inserted_at"]) + entries.append( + f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" + ) + + entries_string = "\n\n".join(entries) + + active = nomination_object["active"] + + start_date = time.format_infraction(nomination_object["inserted_at"]) + if active: + lines = textwrap.dedent( + f""" + =============== + Status: **Active** + Date: {start_date} + Nomination ID: `{nomination_object["id"]}` + + {entries_string} + 
=============== + """ + ) + else: + end_date = time.format_infraction(nomination_object["ended_at"]) + lines = textwrap.dedent( + f""" + =============== + Status: Inactive + Date: {start_date} + Nomination ID: `{nomination_object["id"]}` + + {entries_string} + + End date: {end_date} + Unwatch reason: {nomination_object["end_reason"]} + =============== + """ + ) + + return lines.strip() + + +def setup(bot: Bot) -> None: + """Load the TalentPool cog.""" + bot.add_cog(TalentPool(bot)) diff --git a/bot/exts/recruitment/talentpool/talentpool.py b/bot/exts/recruitment/talentpool/talentpool.py deleted file mode 100644 index d75688fa6..000000000 --- a/bot/exts/recruitment/talentpool/talentpool.py +++ /dev/null @@ -1,335 +0,0 @@ -import logging -import textwrap -from collections import ChainMap -from typing import Union - -from discord import Color, Embed, Member, User -from discord.ext.commands import Cog, Context, group, has_any_role - -from bot.api import ResponseCodeError -from bot.bot import Bot -from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks -from bot.converters import FetchedMember -from bot.exts.moderation.watchchannels._watchchannel import WatchChannel -from bot.pagination import LinePaginator -from bot.utils import time - -REASON_MAX_CHARS = 1000 - -log = logging.getLogger(__name__) - - -class TalentPool(WatchChannel, Cog, name="Talentpool"): - """Relays messages of helper candidates to a watch channel to observe them.""" - - def __init__(self, bot: Bot) -> None: - super().__init__( - bot, - destination=Channels.talent_pool, - webhook_id=Webhooks.talent_pool, - api_endpoint='bot/nominations', - api_default_params={'active': 'true', 'ordering': '-inserted_at'}, - logger=log, - disable_header=True, - ) - - @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) - async def nomination_group(self, ctx: Context) -> None: - """Highlights the activity of 
helper nominees by relaying their messages to the talent pool channel.""" - await ctx.send_help(ctx.command) - - @nomination_group.command(name='watched', aliases=('all', 'list'), root_aliases=("nominees",)) - @has_any_role(*MODERATION_ROLES) - async def watched_command( - self, ctx: Context, oldest_first: bool = False, update_cache: bool = True - ) -> None: - """ - Shows the users that are currently being monitored in the talent pool. - - The optional kwarg `oldest_first` can be used to order the list by oldest nomination. - - The optional kwarg `update_cache` can be used to update the user - cache using the API before listing the users. - """ - await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) - - @nomination_group.command(name='oldest') - @has_any_role(*MODERATION_ROLES) - async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: - """ - Shows talent pool monitored users ordered by oldest nomination. - - The optional kwarg `update_cache` can be used to update the user - cache using the API before listing the users. - """ - await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) - - @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) - @has_any_role(*STAFF_ROLES) - async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: - """ - Relay messages sent by the given `user` to the `#talent-pool` channel. - - A `reason` for adding the user to the talent pool is optional. - If given, it will be displayed in the header when relaying messages of this user to the channel. - """ - if user.bot: - await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.") - return - - if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles): - await ctx.send(":x: Nominating staff members, eh? 
Here's a cookie :cookie:") - return - - if not await self.fetch_user_cache(): - await ctx.send(f":x: Failed to update the user cache; can't add {user}") - return - - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") - return - - # Manual request with `raise_for_status` as False because we want the actual response - session = self.bot.api_client.session - url = self.bot.api_client._url_for(self.api_endpoint) - kwargs = { - 'json': { - 'actor': ctx.author.id, - 'reason': reason, - 'user': user.id - }, - 'raise_for_status': False, - } - async with session.post(url, **kwargs) as resp: - response_data = await resp.json() - - if resp.status == 400: - if response_data.get('user', False): - await ctx.send(":x: The specified user can't be found in the database tables") - elif response_data.get('actor', False): - await ctx.send(":x: You have already nominated this user") - - return - else: - resp.raise_for_status() - - self.watched_users[user.id] = response_data - msg = f":white_check_mark: The nomination for {user} has been added to the talent pool" - - history = await self.bot.api_client.get( - self.api_endpoint, - params={ - "user__id": str(user.id), - "active": "false", - "ordering": "-inserted_at" - } - ) - - if history: - msg += f"\n\n({len(history)} previous nominations in total)" - - await ctx.send(msg) - - @nomination_group.command(name='history', aliases=('info', 'search')) - @has_any_role(*MODERATION_ROLES) - async def history_command(self, ctx: Context, user: FetchedMember) -> None: - """Shows the specified user's nomination history.""" - result = await self.bot.api_client.get( - self.api_endpoint, - params={ - 'user__id': str(user.id), - 'ordering': "-active,-inserted_at" - } - ) - if not result: - await ctx.send(":warning: This user has never been nominated") - return - - embed = Embed( - title=f"Nominations for {user.display_name} `({user.id})`", - color=Color.blue() - ) - lines = 
[self._nomination_to_string(nomination) for nomination in result] - await LinePaginator.paginate( - lines, - ctx=ctx, - embed=embed, - empty=True, - max_lines=3, - max_size=1000 - ) - - @nomination_group.command(name='unwatch', aliases=('end', ), root_aliases=("unnominate",)) - @has_any_role(*MODERATION_ROLES) - async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: - """ - Ends the active nomination of the specified user with the given reason. - - Providing a `reason` is required. - """ - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") - return - - if await self.unwatch(user.id, reason): - await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") - else: - await ctx.send(":x: The specified user does not have an active nomination") - - @nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) - async def nomination_edit_group(self, ctx: Context) -> None: - """Commands to edit nominations.""" - await ctx.send_help(ctx.command) - - @nomination_edit_group.command(name='reason') - @has_any_role(*MODERATION_ROLES) - async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: - """Edits the reason of a specific nominator in a specific active nomination.""" - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") - return - - try: - nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") - except ResponseCodeError as e: - if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") - await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") - return - else: - raise - - if not nomination["active"]: - await ctx.send(":x: Can't edit the reason of 
an inactive nomination.") - return - - if not any(entry["actor"] == actor.id for entry in nomination["entries"]): - await ctx.send(f":x: {actor} doesn't have an entry in this nomination.") - return - - self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}") - - await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination_id}", - json={"actor": actor.id, "reason": reason} - ) - await self.fetch_user_cache() # Update cache - await ctx.send(":white_check_mark: Successfully updated nomination reason.") - - @nomination_edit_group.command(name='end_reason') - @has_any_role(*MODERATION_ROLES) - async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: - """Edits the unnominate reason for the nomination with the given `id`.""" - if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") - return - - try: - nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") - except ResponseCodeError as e: - if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") - await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") - return - else: - raise - - if nomination["active"]: - await ctx.send(":x: Can't edit the end reason of an active nomination.") - return - - self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}") - - await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination_id}", - json={"end_reason": reason} - ) - await self.fetch_user_cache() # Update cache. 
- await ctx.send(":white_check_mark: Updated the end reason of the nomination!") - - @Cog.listener() - async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: - """Remove `user` from the talent pool after they are banned.""" - await self.unwatch(user.id, "User was banned.") - - async def unwatch(self, user_id: int, reason: str) -> bool: - """End the active nomination of a user with the given reason and return True on success.""" - active_nomination = await self.bot.api_client.get( - self.api_endpoint, - params=ChainMap( - {"user__id": str(user_id)}, - self.api_default_params, - ) - ) - - if not active_nomination: - log.debug(f"No active nominate exists for {user_id=}") - return False - - log.info(f"Ending nomination: {user_id=} {reason=}") - - nomination = active_nomination[0] - await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination['id']}", - json={'end_reason': reason, 'active': False} - ) - self._remove_user(user_id) - - return True - - def _nomination_to_string(self, nomination_object: dict) -> str: - """Creates a string representation of a nomination.""" - guild = self.bot.get_guild(Guild.id) - entries = [] - for site_entry in nomination_object["entries"]: - actor_id = site_entry["actor"] - actor = guild.get_member(actor_id) - - reason = site_entry["reason"] or "*None*" - created = time.format_infraction(site_entry["inserted_at"]) - entries.append( - f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" - ) - - entries_string = "\n\n".join(entries) - - active = nomination_object["active"] - - start_date = time.format_infraction(nomination_object["inserted_at"]) - if active: - lines = textwrap.dedent( - f""" - =============== - Status: **Active** - Date: {start_date} - Nomination ID: `{nomination_object["id"]}` - - {entries_string} - =============== - """ - ) - else: - end_date = time.format_infraction(nomination_object["ended_at"]) - lines = textwrap.dedent( - f""" - =============== - 
Status: Inactive - Date: {start_date} - Nomination ID: `{nomination_object["id"]}` - - {entries_string} - - End date: {end_date} - Unwatch reason: {nomination_object["end_reason"]} - =============== - """ - ) - - return lines.strip() - - -def setup(bot: Bot) -> None: - """Load the TalentPool cog.""" - bot.add_cog(TalentPool(bot)) -- cgit v1.2.3 From f7f38d30cd7c26f9941b77c155ed5876fc2c410a Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Wed, 10 Mar 2021 20:20:17 +0200 Subject: Make talentpool a package and move cog load to __init__.py --- bot/exts/recruitment/talentpool/__init__.py | 8 ++++++++ bot/exts/recruitment/talentpool/_cog.py | 5 ----- 2 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 bot/exts/recruitment/talentpool/__init__.py diff --git a/bot/exts/recruitment/talentpool/__init__.py b/bot/exts/recruitment/talentpool/__init__.py new file mode 100644 index 000000000..52d27eb99 --- /dev/null +++ b/bot/exts/recruitment/talentpool/__init__.py @@ -0,0 +1,8 @@ +from bot.bot import Bot + + +def setup(bot: Bot) -> None: + """Load the TalentPool cog.""" + from bot.exts.recruitment.talentpool._cog import TalentPool + + bot.add_cog(TalentPool(bot)) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index d75688fa6..67513f386 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -328,8 +328,3 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): ) return lines.strip() - - -def setup(bot: Bot) -> None: - """Load the TalentPool cog.""" - bot.add_cog(TalentPool(bot)) -- cgit v1.2.3 From f6b608a977406810d95e4a1dfccbb915bf62268e Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Wed, 10 Mar 2021 20:36:06 +0200 Subject: Add __init__.py to recruitment Make it a package as well so that the talentpool actually loads. 
--- bot/exts/recruitment/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 bot/exts/recruitment/__init__.py diff --git a/bot/exts/recruitment/__init__.py b/bot/exts/recruitment/__init__.py new file mode 100644 index 000000000..e69de29bb -- cgit v1.2.3 From 4f08f041d03a130012d83c50999a18a39e75dbdc Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Thu, 11 Mar 2021 01:37:40 +0200 Subject: Added an auto-reviewer to the talentpool cog This commit adds the functionality to automatically review a nominee a set number of days after being nominated. This is implemented by subclassing the Scheduler and formatting a review after 30 days. The review contains details of the nominee, their nominations, the number of messages they have and the channels they're most active in, and statistics about their infractions and previous nominations. Lastly, the bot will add three emojis to the review: eyes to mark as seen, a thumbsup, and thumbsdown for the vote itself. The code accounts for the possibility of the review being too long for a single message but splitting it where necessary. 
--- bot/exts/moderation/watchchannels/_watchchannel.py | 78 ++++-- bot/exts/recruitment/talentpool/_cog.py | 71 +++++- bot/exts/recruitment/talentpool/_review.py | 273 +++++++++++++++++++++ bot/utils/time.py | 8 + 4 files changed, 404 insertions(+), 26 deletions(-) create mode 100644 bot/exts/recruitment/talentpool/_review.py diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index 0793a66af..b121243ce 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -5,9 +5,8 @@ import textwrap from abc import abstractmethod from collections import defaultdict, deque from dataclasses import dataclass -from typing import Optional +from typing import Any, Dict, Optional -import dateutil.parser import discord from discord import Color, DMChannel, Embed, HTTPException, Message, errors from discord.ext.commands import Cog, Context @@ -20,7 +19,7 @@ from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE from bot.exts.moderation.modlog import ModLog from bot.pagination import LinePaginator from bot.utils import CogABCMeta, messages -from bot.utils.time import time_since +from bot.utils.time import get_time_delta log = logging.getLogger(__name__) @@ -136,7 +135,10 @@ class WatchChannel(metaclass=CogABCMeta): if not await self.fetch_user_cache(): await self.modlog.send_log_message( title=f"Warning: Failed to retrieve user cache for the {self.__class__.__name__} watch channel", - text="Could not retrieve the list of watched users from the API and messages will not be relayed.", + text=( + "Could not retrieve the list of watched users from the API. " + "Messages will not be relayed, and reviews not rescheduled." 
+ ), ping_everyone=True, icon_url=Icons.token_removed, colour=Color.red() @@ -280,7 +282,7 @@ class WatchChannel(metaclass=CogABCMeta): actor = actor.display_name if actor else self.watched_users[user_id]['actor'] inserted_at = self.watched_users[user_id]['inserted_at'] - time_delta = self._get_time_delta(inserted_at) + time_delta = get_time_delta(inserted_at) reason = self.watched_users[user_id]['reason'] @@ -308,35 +310,61 @@ class WatchChannel(metaclass=CogABCMeta): The optional kwarg `update_cache` specifies whether the cache should be refreshed by polling the API. """ - if update_cache: - if not await self.fetch_user_cache(): - await ctx.send(f":x: Failed to update {self.__class__.__name__} user cache, serving from cache") - update_cache = False + watched_data = await self.prepare_watched_users_data(ctx, oldest_first, update_cache) - lines = [] - for user_id, user_data in self.watched_users.items(): - inserted_at = user_data['inserted_at'] - time_delta = self._get_time_delta(inserted_at) - lines.append(f"• <@{user_id}> (added {time_delta})") - - if oldest_first: - lines.reverse() + if update_cache and not watched_data["updated"]: + await ctx.send(f":x: Failed to update {self.__class__.__name__} user cache, serving from cache") - lines = lines or ("There's nothing here yet.",) + lines = watched_data["info"].values() or ("There's nothing here yet.",) embed = Embed( - title=f"{self.__class__.__name__} watched users ({'updated' if update_cache else 'cached'})", + title=watched_data["title"], color=Color.blue() ) await LinePaginator.paginate(lines, ctx, embed, empty=False) - @staticmethod - def _get_time_delta(time_string: str) -> str: - """Returns the time in human-readable time delta format.""" - date_time = dateutil.parser.isoparse(time_string).replace(tzinfo=None) - time_delta = time_since(date_time, precision="minutes", max_units=1) + async def prepare_watched_users_data( + self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + ) -> 
Dict[str, Any]: + """ + Prepare overview information of watched users to list. + + The optional kwarg `oldest_first` orders the list by oldest entry. + + The optional kwarg `update_cache` specifies whether the cache should + be refreshed by polling the API. + + Returns a dictionary with a "title" key for the list's title, and a "info" key with + information about each user. + + The dictionary additionally has an "updated" field which is true if a cache update was + requested and it succeeded. + """ + list_data = {} + if update_cache: + if not await self.fetch_user_cache(): + update_cache = False + list_data["updated"] = update_cache + + watched_iter = self.watched_users.items() + if oldest_first: + watched_iter = reversed(watched_iter) + + list_data["info"] = {} + for user_id, user_data in watched_iter: + member = ctx.guild.get_member(user_id) + line = f"• <@{user_id}>" + if member: + line += f" ({member.name}#{member.discriminator})" + inserted_at = user_data['inserted_at'] + line += f", added {get_time_delta(inserted_at)}" + if not member: # Cross off users who left the server. 
+ line = f"~~{line}~~" + list_data["info"][user_id] = line + + list_data["title"] = f"{self.__class__.__name__} watched users ({'updated' if update_cache else 'cached'})" - return time_delta + return list_data def _remove_user(self, user_id: int) -> None: """Removes a user from a watch channel.""" diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index 67513f386..60f5cdf8c 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -1,3 +1,4 @@ + import logging import textwrap from collections import ChainMap @@ -11,6 +12,7 @@ from bot.bot import Bot from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks from bot.converters import FetchedMember from bot.exts.moderation.watchchannels._watchchannel import WatchChannel +from bot.exts.recruitment.talentpool._review import Reviewer from bot.pagination import LinePaginator from bot.utils import time @@ -33,6 +35,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): disable_header=True, ) + self.reviewer = Reviewer(self.__class__.__name__, bot, self) + self.bot.loop.create_task(self.reviewer.reschedule_reviews()) + @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) @has_any_role(*MODERATION_ROLES) async def nomination_group(self, ctx: Context) -> None: @@ -54,6 +59,44 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): """ await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + async def list_watched_users( + self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + ) -> None: + """ + Gives an overview of the nominated users list. + + It specifies the users' mention, name, how long ago they were nominated, and whether their + review was scheduled or already posted. + + The optional kwarg `oldest_first` orders the list by oldest entry. 
+ + The optional kwarg `update_cache` specifies whether the cache should + be refreshed by polling the API. + """ + # TODO Once the watch channel is removed, this can be done in a smarter way, without splitting and overriding + # the list_watched_users function. + watched_data = await self.prepare_watched_users_data(ctx, oldest_first, update_cache) + + if update_cache and not watched_data["updated"]: + await ctx.send(f":x: Failed to update {self.__class__.__name__} user cache, serving from cache") + + lines = [] + for user_id, line in watched_data["info"].items(): + if self.watched_users[user_id]['reviewed']: + line += " *(reviewed)*" + elif user_id in self.reviewer: + line += " *(scheduled)*" + lines.append(line) + + if not lines: + lines = ("There's nothing here yet.",) + + embed = Embed( + title=watched_data["title"], + color=Color.blue() + ) + await LinePaginator.paginate(lines, ctx, embed, empty=False) + @nomination_group.command(name='oldest') @has_any_role(*MODERATION_ROLES) async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: @@ -115,7 +158,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): resp.raise_for_status() self.watched_users[user.id] = response_data - msg = f":white_check_mark: The nomination for {user} has been added to the talent pool" + + if user.id not in self.reviewer: + self.reviewer.schedule_review(user.id) history = await self.bot.api_client.get( self.api_endpoint, @@ -126,6 +171,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): } ) + msg = f"✅ The nomination for {user} has been added to the talent pool" if history: msg += f"\n\n({len(history)} previous nominations in total)" @@ -249,6 +295,22 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await self.fetch_user_cache() # Update cache. 
await ctx.send(":white_check_mark: Updated the end reason of the nomination!") + @nomination_group.command(aliases=('mr',)) + async def mark_reviewed(self, ctx: Context, nomination_id: int) -> None: + """Mark a nomination as reviewed and cancel the review task.""" + if not await self.reviewer.mark_reviewed(ctx, nomination_id): + return + await ctx.channel.send(f"✅ The nomination with ID `{nomination_id}` was marked as reviewed.") + + @nomination_group.command(aliases=('review',)) + async def post_review(self, ctx: Context, nomination_id: int) -> None: + """Post the automatic review for the user ahead of time.""" + if not (user_id := await self.reviewer.mark_reviewed(ctx, nomination_id)): + return + + await self.reviewer.post_review(user_id, update_database=False) + await ctx.message.add_reaction("✅") + @Cog.listener() async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: """Remove `user` from the talent pool after they are banned.""" @@ -277,6 +339,8 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): ) self._remove_user(user_id) + self.reviewer.cancel(user_id) + return True def _nomination_to_string(self, nomination_object: dict) -> str: @@ -328,3 +392,8 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): ) return lines.strip() + + def cog_unload(self) -> None: + """Cancels all review tasks on cog unload.""" + super().cog_unload() + self.reviewer.cancel_all() diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py new file mode 100644 index 000000000..64a1c6226 --- /dev/null +++ b/bot/exts/recruitment/talentpool/_review.py @@ -0,0 +1,273 @@ +import asyncio +import logging +import textwrap +import typing +from collections import Counter +from datetime import datetime, timedelta +from typing import List, Optional + +from dateutil.parser import isoparse +from dateutil.relativedelta import relativedelta +from discord import Member, Message, TextChannel +from discord.ext.commands 
import Context + +from bot.api import ResponseCodeError +from bot.bot import Bot +from bot.constants import Channels, Guild, Roles +from bot.utils.scheduling import Scheduler +from bot.utils.time import get_time_delta, humanize_delta, time_since + +if typing.TYPE_CHECKING: + from bot.exts.recruitment.talentpool._cog import TalentPool + +log = logging.getLogger(__name__) + +# Maximum amount of days before an automatic review is posted. +MAX_DAYS_IN_POOL = 30 + +# Maximum amount of characters allowed in a message +MAX_MESSAGE_SIZE = 2000 + + +class Reviewer(Scheduler): + """Schedules, formats, and publishes reviews of helper nominees.""" + + def __init__(self, name: str, bot: Bot, pool: 'TalentPool'): + super().__init__(name) + self.bot = bot + self._pool = pool + + async def reschedule_reviews(self) -> None: + """Reschedule all active nominations to be reviewed at the appropriate time.""" + log.trace("Rescheduling reviews") + await self.bot.wait_until_guild_available() + # TODO Once the watch channel is removed, this can be done in a smarter way, e.g create a sync function. 
+ await self._pool.fetch_user_cache() + + for user_id, user_data in self._pool.watched_users.items(): + if not user_data["reviewed"]: + self.schedule_review(user_id) + + def schedule_review(self, user_id: int) -> None: + """Schedules a single user for review.""" + log.trace(f"Scheduling review of user with ID {user_id}") + + user_data = self._pool.watched_users[user_id] + inserted_at = isoparse(user_data['inserted_at']).replace(tzinfo=None) + review_at = inserted_at + timedelta(days=MAX_DAYS_IN_POOL) + + self.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) + + async def post_review(self, user_id: int, update_database: bool) -> None: + """Format a generic review of a user and post it to the mod announcements channel.""" + log.trace(f"Posting the review of {user_id}") + + nomination = self._pool.watched_users[user_id] + guild = self.bot.get_guild(Guild.id) + channel = guild.get_channel(Channels.mod_announcements) + member = guild.get_member(user_id) + if not member: + channel.send(f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server 😔") + return + + if update_database: + await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) + + opening = f"<@&{Roles.moderators}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!" + + current_nominations = "\n\n".join( + f"**<@{entry['actor']}>:** {entry['reason']}" for entry in nomination['entries'] + ) + current_nominations = f"**Nominated by:**\n{current_nominations}" + + review_body = await self._construct_review_body(member) + + vote_request = "*Refer to their nomination and infraction histories for further details*.\n" + vote_request += "*Please react 👀 if you've seen this post. Then react 👍 for approval, or 👎 for disapproval*." 
+ + review = "\n\n".join(part for part in (opening, current_nominations, review_body, vote_request)) + + message = (await self._bulk_send(channel, review))[-1] + for reaction in ("👀", "👍", "👎"): + await message.add_reaction(reaction) + + async def _construct_review_body(self, member: Member) -> str: + """Formats the body of the nomination, with details of activity, infractions, and previous nominations.""" + activity = await self._activity_review(member) + infractions = await self._infractions_review(member) + prev_nominations = await self._previous_nominations_review(member) + + body = f"{activity}\n\n{infractions}" + if prev_nominations: + body += f"\n\n{prev_nominations}" + return body + + async def _activity_review(self, member: Member) -> str: + """ + Format the activity of the nominee. + + Adds details on how long they've been on the server, their total message count, + and the channels they're the most active in. + """ + log.trace(f"Fetching the metricity data for {member.id}'s review") + try: + user_activity = await self.bot.api_client.get(f"bot/users/{member.id}/metricity_review_data") + except ResponseCodeError as e: + if e.status == 404: + messages = "no" + channels = "" + else: + raise + else: + messages = user_activity["total_messages"] + # Making this part flexible to the amount of expected and returned channels. 
+ first_channel = user_activity["top_channel_activity"][0] + channels = f", with {first_channel[1]} messages in {first_channel[0]}" + + if len(user_activity["top_channel_activity"]) > 1: + channels += ", " + ", ".join( + f"{count} in {channel}" for channel, count in user_activity["top_channel_activity"][1: -1] + ) + last_channel = user_activity["top_channel_activity"][-1] + channels += f", and {last_channel[1]} in {last_channel[0]}" + + time_on_server = humanize_delta(relativedelta(datetime.utcnow(), member.joined_at), max_units=2) + review = f"{member.name} has been on the server for **{time_on_server}**" + review += f" and has **{messages} messages**{channels}." + + return review + + async def _infractions_review(self, member: Member) -> str: + """ + Formats the review of the nominee's infractions, if any. + + The infractions are listed by type and amount, and it is stated how long ago the last one was issued. + """ + log.trace(f"Fetching the infraction data for {member.id}'s review") + infraction_list = await self.bot.api_client.get( + 'bot/infractions/expanded', + params={'user__id': str(member.id), 'ordering': '-inserted_at'} + ) + + if not infraction_list: + return "They have no infractions." + + # Count the amount of each type of infraction. + infr_stats = list(Counter(infr["type"] for infr in infraction_list).items()) + + # Format into a sentence. + infractions = ", ".join( + f"{count} {self._format_infr_name(infr_type, count)}" + for infr_type, count in infr_stats[:-1] + ) + if len(infr_stats) > 1: + last_infr, last_count = infr_stats[-1] + infractions += f", and {last_count} {self._format_infr_name(last_infr, last_count)}" + + infractions = f"**{infractions}**" + + # Show when the last one was issued. + if len(infraction_list) == 1: + infractions += ", issued " + else: + infractions += ", with the last infraction issued " + + # Infractions were ordered by time since insertion descending. 
+ infractions += get_time_delta(infraction_list[0]['inserted_at']) + + return f"They have {infractions}." + + @staticmethod + def _format_infr_name(infr_type: str, count: int) -> str: + """ + Format the infraction type in a way readable in a sentence. + + Underscores are replaced with spaces, as well as *attempting* to show the appropriate plural form if necessary. + This function by no means covers all rules of grammar. + """ + formatted = infr_type.replace("_", " ") + if count > 1: + if infr_type.endswith(('ch', 'sh')): + formatted += "e" + formatted += "s" + + return formatted + + async def _previous_nominations_review(self, member: Member) -> Optional[str]: + """ + Formats the review of the nominee's previous nominations. + + The number of previous nominations and unnominations are shown, as well as the reason the last one ended. + """ + log.trace(f"Fetching the nomination history data for {member.id}'s review") + history = await self.bot.api_client.get( + self._pool.api_endpoint, + params={ + "user__id": str(member.id), + "active": "false", + "ordering": "-inserted_at" + } + ) + + if not history: + return + + num_entries = sum(len(nomination["entries"]) for nomination in history) + + nomination_times = f"{num_entries} times" if num_entries > 1 else "once" + rejection_times = f"{len(history)} times" if len(history) > 1 else "once" + review = f"They were nominated **{nomination_times}** before" + review += f", but their nomination was called off **{rejection_times}**." + + end_time = time_since(isoparse(history[0]['ended_at']).replace(tzinfo=None), max_units=2) + review += f"\nThe last one ended {end_time} with the reason: {history[0]['end_reason']}" + + return review + + @staticmethod + async def _bulk_send(channel: TextChannel, text: str) -> List[Message]: + """ + Split a text into several if necessary, and post them to the channel. + + Returns the resulting message objects. 
+ """ + messages = textwrap.wrap(text, width=MAX_MESSAGE_SIZE, replace_whitespace=False) + + results = [] + for message in messages: + await asyncio.sleep(1) + results.append(await channel.send(message)) + + return results + + async def mark_reviewed(self, ctx: Context, nomination_id: int) -> Optional[int]: + """ + Mark an active nomination as reviewed, updating the database and canceling the review task. + + On success, returns the user ID. + """ + log.trace(f"Updating nomination #{nomination_id} as review") + try: + nomination = await self.bot.api_client.get(f"{self._pool.api_endpoint}/{nomination_id}") + except ResponseCodeError as e: + if e.response.status == 404: + self.log.trace(f"Nomination API 404: Can't find nomination with id {nomination_id}") + await ctx.send(f"❌ Can't find a nomination with id `{nomination_id}`") + return None + else: + raise + + if nomination["reviewed"]: + await ctx.send("❌ This nomination was already reviewed, but here's a cookie 🍪") + return None + elif not nomination["active"]: + await ctx.send("❌ This nomination is inactive") + return None + + await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) + if nomination["user"] in self: + self.cancel(nomination["user"]) + + await self._pool.fetch_user_cache() + + return nomination["user"] diff --git a/bot/utils/time.py b/bot/utils/time.py index f862e40f7..466f0adc2 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -85,6 +85,14 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: return humanized +def get_time_delta(time_string: str) -> str: + """Returns the time in human-readable time delta format.""" + date_time = dateutil.parser.isoparse(time_string).replace(tzinfo=None) + time_delta = time_since(date_time, precision="minutes", max_units=1) + + return time_delta + + def parse_duration_string(duration: str) -> Optional[relativedelta]: """ Converts a `duration` string to a relativedelta object. 
-- cgit v1.2.3 From 0eb8059a0ba6bb6bce464b4b3afb7847aa3bf098 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Thu, 11 Mar 2021 02:59:13 +0200 Subject: Limit new commands to mods+ --- bot/exts/recruitment/talentpool/_cog.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index 60f5cdf8c..070a4fd83 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -296,6 +296,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await ctx.send(":white_check_mark: Updated the end reason of the nomination!") @nomination_group.command(aliases=('mr',)) + @has_any_role(*MODERATION_ROLES) async def mark_reviewed(self, ctx: Context, nomination_id: int) -> None: """Mark a nomination as reviewed and cancel the review task.""" if not await self.reviewer.mark_reviewed(ctx, nomination_id): @@ -303,6 +304,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await ctx.channel.send(f"✅ The nomination with ID `{nomination_id}` was marked as reviewed.") @nomination_group.command(aliases=('review',)) + @has_any_role(*MODERATION_ROLES) async def post_review(self, ctx: Context, nomination_id: int) -> None: """Post the automatic review for the user ahead of time.""" if not (user_id := await self.reviewer.mark_reviewed(ctx, nomination_id)): -- cgit v1.2.3 From 608f755deead9f180d8c714b69d82c606dba931a Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Thu, 11 Mar 2021 22:50:02 +0200 Subject: The 'seen vote' emoji is now a random ducky. 
--- bot/exts/recruitment/talentpool/_review.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 64a1c6226..adab1a907 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -1,14 +1,15 @@ import asyncio import logging +import random import textwrap import typing from collections import Counter from datetime import datetime, timedelta -from typing import List, Optional +from typing import List, Optional, Union from dateutil.parser import isoparse from dateutil.relativedelta import relativedelta -from discord import Member, Message, TextChannel +from discord import Emoji, Member, Message, TextChannel from discord.ext.commands import Context from bot.api import ResponseCodeError @@ -82,13 +83,15 @@ class Reviewer(Scheduler): review_body = await self._construct_review_body(member) + seen_emoji = self._random_ducky(guild) vote_request = "*Refer to their nomination and infraction histories for further details*.\n" - vote_request += "*Please react 👀 if you've seen this post. Then react 👍 for approval, or 👎 for disapproval*." + vote_request += f"*Please react {seen_emoji} if you've seen this post." + vote_request += " Then react 👍 for approval, or 👎 for disapproval*." review = "\n\n".join(part for part in (opening, current_nominations, review_body, vote_request)) message = (await self._bulk_send(channel, review))[-1] - for reaction in ("👀", "👍", "👎"): + for reaction in (seen_emoji, "👍", "👎"): await message.add_reaction(reaction) async def _construct_review_body(self, member: Member) -> str: @@ -224,6 +227,14 @@ class Reviewer(Scheduler): return review + @staticmethod + def _random_ducky(guild: Guild) -> Union[Emoji, str]: + """Picks a random ducky emoji to be used to mark the vote as seen. 
If no duckies found returns 👀.""" + duckies = [emoji for emoji in guild.emojis if emoji.name.startswith("ducky")] + if not duckies: + return "👀" + return random.choice(duckies) + @staticmethod async def _bulk_send(channel: TextChannel, text: str) -> List[Message]: """ -- cgit v1.2.3 From 326cd6dccee276da9b6deee827cb893615be352b Mon Sep 17 00:00:00 2001 From: kwzrd Date: Thu, 11 Mar 2021 23:57:36 +0100 Subject: Compose: read GitHub API key from '.env' --- docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose.yml b/docker-compose.yml index 0002d1d56..f9a29388d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -62,3 +62,4 @@ services: BOT_API_KEY: badbot13m0n8f570f942013fc818f234916ca531 REDDIT_CLIENT_ID: ${REDDIT_CLIENT_ID} REDDIT_SECRET: ${REDDIT_SECRET} + GITHUB_API_KEY: ${GITHUB_API_KEY} -- cgit v1.2.3 From a8c0da00248fa3dc3100a55e47b7c2df5952e0a4 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Fri, 12 Mar 2021 00:56:25 +0100 Subject: Compose: read all environment variables from '.env' --- docker-compose.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index f9a29388d..8afdd6ef1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -57,9 +57,7 @@ services: - web - redis - snekbox + env_file: + - .env environment: - BOT_TOKEN: ${BOT_TOKEN} BOT_API_KEY: badbot13m0n8f570f942013fc818f234916ca531 - REDDIT_CLIENT_ID: ${REDDIT_CLIENT_ID} - REDDIT_SECRET: ${REDDIT_SECRET} - GITHUB_API_KEY: ${GITHUB_API_KEY} -- cgit v1.2.3 From 92bfdd3e4aab061d62387ba2abc413d7e803641b Mon Sep 17 00:00:00 2001 From: xithrius Date: Mon, 8 Mar 2021 01:05:01 -0800 Subject: Remove invoked command and message after failure. 
--- bot/exts/info/pypi.py | 62 +++++++++++++++++++++++++++++---------------------- 1 file changed, 35 insertions(+), 27 deletions(-) diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py index 8fe249c8a..10029aa73 100644 --- a/bot/exts/info/pypi.py +++ b/bot/exts/info/pypi.py @@ -8,7 +8,7 @@ from discord.ext.commands import Cog, Context, command from discord.utils import escape_markdown from bot.bot import Bot -from bot.constants import Colours, NEGATIVE_REPLIES +from bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput URL = "https://pypi.org/pypi/{package}/json" FIELDS = ("author", "requires_python", "summary", "license") @@ -17,6 +17,7 @@ PYPI_ICON = "https://cdn.discordapp.com/emojis/766274397257334814.png" PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white)) ILLEGAL_CHARACTERS = re.compile(r"[^a-zA-Z0-9-.]+") +INVALID_INPUT_DELETE_DELAY = RedirectOutput.delete_delay log = logging.getLogger(__name__) @@ -36,42 +37,49 @@ class PyPi(Cog): ) embed.set_thumbnail(url=PYPI_ICON) + error = True + if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None: embed.description = f"Illegal character passed into command: '{escape_markdown(character.group(0))}'" - await ctx.send(embed=embed) - return - async with self.bot.http_session.get(URL.format(package=package)) as response: - if response.status == 404: - embed.description = "Package could not be found." + else: + async with self.bot.http_session.get(URL.format(package=package)) as response: + if response.status == 404: + embed.description = "Package could not be found." 
- elif response.status == 200 and response.content_type == "application/json": - response_json = await response.json() - info = response_json["info"] + elif response.status == 200 and response.content_type == "application/json": + response_json = await response.json() + info = response_json["info"] - embed.title = f"{info['name']} v{info['version']}" - embed.url = info['package_url'] - embed.colour = next(PYPI_COLOURS) + embed.title = f"{info['name']} v{info['version']}" + embed.url = info['package_url'] + embed.colour = next(PYPI_COLOURS) - for field in FIELDS: - field_data = info[field] + for field in FIELDS: + field_data = info[field] - # Field could be completely empty, in some cases can be a string with whitespaces, or None. - if field_data and not field_data.isspace(): - if '\n' in field_data and field == "license": - field_data = field_data.split('\n')[0] + # Field could be completely empty, in some cases can be a string with whitespaces, or None. + if field_data and not field_data.isspace(): + if '\n' in field_data and field == "license": + field_data = field_data.split('\n')[0] - embed.add_field( - name=field.replace("_", " ").title(), - value=escape_markdown(field_data), - inline=False, - ) + embed.add_field( + name=field.replace("_", " ").title(), + value=escape_markdown(field_data), + inline=False, + ) - else: - embed.description = "There was an error when fetching your PyPi package." - log.trace(f"Error when fetching PyPi package: {response.status}.") + error = False - await ctx.send(embed=embed) + else: + embed.description = "There was an error when fetching your PyPi package." 
+ log.trace(f"Error when fetching PyPi package: {response.status}.") + + if error: + await ctx.send(embed=embed, delete_after=INVALID_INPUT_DELETE_DELAY) + await ctx.message.delete(delay=INVALID_INPUT_DELETE_DELAY) + else: + await ctx.send(embed=embed) def setup(bot: Bot) -> None: -- cgit v1.2.3 From 8d93afa4047d3e87fdd1bff6f003e1cfb44bd01c Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 12 Mar 2021 04:48:20 +0100 Subject: Correct length limits to embed limits Previously the code used limits that apply to raw messages, not embeds. Both the description and footer limits are separate, while their individual limits are 2048 chars instead of 2000. The footer overhead was removed from the max description length and the footer is now truncated to 200 chars which is roughly 2 lines --- bot/exts/info/doc/_cog.py | 2 +- bot/exts/info/doc/_parsing.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 9e41c6f1e..bf49e0aee 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -284,7 +284,7 @@ class DocCog(commands.Cog): # with a max of 100 chars. 
 if symbol_name in self.renamed_symbols: renamed_symbols = ', '.join(self.renamed_symbols[symbol_name]) - footer_text = textwrap.shorten("Moved: " + renamed_symbols, 100, placeholder=' ...') + footer_text = textwrap.shorten("Moved: " + renamed_symbols, 200, placeholder=' ...') else: footer_text = "" diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index 57c991ae0..b06aebd45 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -33,8 +33,8 @@ _NO_SIGNATURE_GROUPS = { _EMBED_CODE_BLOCK_LINE_LENGTH = 61 # _MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight _MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * MAX_SIGNATURE_AMOUNT -# Maximum discord message length - signatures on top - space for footer -_MAX_DESCRIPTION_LENGTH = 1900 - _MAX_SIGNATURES_LENGTH +# Maximum embed description length - signatures on top +_MAX_DESCRIPTION_LENGTH = 2048 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace BracketPair = namedtuple("BracketPair", ["opening_bracket", "closing_bracket"]) -- cgit v1.2.3 From de0bc6ea58a2766d9637af80e703e11291e424e1 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 12 Mar 2021 14:14:12 +0200 Subject: Reviewer no longer subclasses Scheduler It didn't make much sense for the Reviewer to subclass Scheduler. The Scheduler has methods that don't make sense to use on the Reviewer directly. There is now a Scheduler object as an attribute of the Reviewer. Interacting with it is done by adding __contains__, cancel, and cancel_all methods. 
--- bot/exts/recruitment/talentpool/_review.py | 36 +++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index adab1a907..beb4c130f 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -30,13 +30,17 @@ MAX_DAYS_IN_POOL = 30 MAX_MESSAGE_SIZE = 2000 -class Reviewer(Scheduler): +class Reviewer: """Schedules, formats, and publishes reviews of helper nominees.""" def __init__(self, name: str, bot: Bot, pool: 'TalentPool'): - super().__init__(name) self.bot = bot self._pool = pool + self._review_scheduler = Scheduler(name) + + def __contains__(self, user_id: int) -> bool: + """Return True if the user with ID user_id is scheduled for review, False otherwise.""" + return user_id in self._review_scheduler async def reschedule_reviews(self) -> None: """Reschedule all active nominations to be reviewed at the appropriate time.""" @@ -57,13 +61,17 @@ class Reviewer(Scheduler): inserted_at = isoparse(user_data['inserted_at']).replace(tzinfo=None) review_at = inserted_at + timedelta(days=MAX_DAYS_IN_POOL) - self.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) + self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) async def post_review(self, user_id: int, update_database: bool) -> None: """Format a generic review of a user and post it to the mod announcements channel.""" log.trace(f"Posting the review of {user_id}") nomination = self._pool.watched_users[user_id] + if not nomination: + log.trace(f"There doesn't appear to be an active nomination for {user_id}") + return + guild = self.bot.get_guild(Guild.id) channel = guild.get_channel(Channels.mod_announcements) member = guild.get_member(user_id) @@ -276,9 +284,27 @@ class Reviewer(Scheduler): return None await 
 self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) - if nomination["user"] in self: - self.cancel(nomination["user"]) + if nomination["user"] in self._review_scheduler: + self._review_scheduler.cancel(nomination["user"]) await self._pool.fetch_user_cache() return nomination["user"] + + def cancel(self, user_id: int) -> None: + """ + Cancels the review of the nominee with ID user_id. + + It's important to note that this applies only until reschedule_reviews is called again. + To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed. + """ + self._review_scheduler.cancel(user_id) + + def cancel_all(self) -> None: + """ + Cancels all reviews. + + It's important to note that this applies only until reschedule_reviews is called again. + To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed. + """ + self._review_scheduler.cancel_all() -- cgit v1.2.3 From 4f17ba526995927fa3b1fb8e925179ab61e26265 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 12 Mar 2021 15:16:43 +0200 Subject: Improve string building for long lines --- bot/exts/recruitment/talentpool/_review.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index beb4c130f..56b51925e 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -92,9 +92,11 @@ class Reviewer: review_body = await self._construct_review_body(member) seen_emoji = self._random_ducky(guild) - vote_request = "*Refer to their nomination and infraction histories for further details*.\n" - vote_request += f"*Please react {seen_emoji} if you've seen this post." - vote_request += " Then react 👍 for approval, or 👎 for disapproval*." 
+ vote_request = ( + "*Refer to their nomination and infraction histories for further details*.\n" + f"*Please react {seen_emoji} if you've seen this post." + " Then react 👍 for approval, or 👎 for disapproval*." + ) review = "\n\n".join(part for part in (opening, current_nominations, review_body, vote_request)) @@ -143,8 +145,10 @@ class Reviewer: channels += f", and {last_channel[1]} in {last_channel[0]}" time_on_server = humanize_delta(relativedelta(datetime.utcnow(), member.joined_at), max_units=2) - review = f"{member.name} has been on the server for **{time_on_server}**" - review += f" and has **{messages} messages**{channels}." + review = ( + f"{member.name} has been on the server for **{time_on_server}**" + f" and has **{messages} messages**{channels}." + ) return review @@ -227,11 +231,13 @@ class Reviewer: nomination_times = f"{num_entries} times" if num_entries > 1 else "once" rejection_times = f"{len(history)} times" if len(history) > 1 else "once" - review = f"They were nominated **{nomination_times}** before" - review += f", but their nomination was called off **{rejection_times}**." - end_time = time_since(isoparse(history[0]['ended_at']).replace(tzinfo=None), max_units=2) - review += f"\nThe last one ended {end_time} with the reason: {history[0]['end_reason']}" + + review = ( + f"They were nominated **{nomination_times}** before" + f", but their nomination was called off **{rejection_times}**." 
+ f"\nThe last one ended {end_time} with the reason: {history[0]['end_reason']}" + ) return review -- cgit v1.2.3 From e82931be287d956237ad2e0562e46492f4f5b839 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Fri, 12 Mar 2021 14:51:46 +0100 Subject: Fix typo in the token remover --- bot/exts/filters/webhook_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/filters/webhook_remover.py b/bot/exts/filters/webhook_remover.py index 08fe94055..f11fc8912 100644 --- a/bot/exts/filters/webhook_remover.py +++ b/bot/exts/filters/webhook_remover.py @@ -14,7 +14,7 @@ WEBHOOK_URL_RE = re.compile(r"((?:https?://)?discord(?:app)?\.com/api/webhooks/\ ALERT_MESSAGE_TEMPLATE = ( "{user}, looks like you posted a Discord webhook URL. Therefore, your " "message has been removed. Your webhook may have been **compromised** so " - "please re-create the webhook **immediately**. If you believe this was " + "please re-create the webhook **immediately**. If you believe this was a " "mistake, please let us know." ) -- cgit v1.2.3 From fb74ec007410bfb6afb92d4170f0c6402561c79d Mon Sep 17 00:00:00 2001 From: xithrius Date: Fri, 12 Mar 2021 12:13:20 -0800 Subject: Cleared up messages in logs. --- bot/exts/help_channels/_cog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 9a33a6bb1..e70cfc71d 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -498,16 +498,16 @@ class HelpChannels(commands.Cog): if self.dynamic_message is not None: try: + log.trace("Help channels have changed, dynamic message has been edited.") await self.bot.http.edit_message( constants.Channels.how_to_get_help, self.dynamic_message, content=available_channels ) - log.trace("Help channels have changed, dynamic message has been edited.") except discord.NotFound: pass else: return - log.trace("No How-to-get-help dynamic message could be found in the Redis cache. 
Setting a new one.") + log.trace("Dynamic message could not be edited or found. Creating a new one.") new_dynamic_message = await self.bot.http.send_message( constants.Channels.how_to_get_help, available_channels ) -- cgit v1.2.3 From c242c373be2cc5f5d577ba18e5e213fc8aed22bf Mon Sep 17 00:00:00 2001 From: xithrius Date: Fri, 12 Mar 2021 12:16:17 -0800 Subject: First dynamic update moved to the init_available method. --- bot/exts/help_channels/_cog.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index e70cfc71d..16f6b10bd 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -242,6 +242,10 @@ class HelpChannels(commands.Cog): for channel in channels[:abs(missing)]: await self.unclaim_channel(channel) + # Getting channels that need to be included in the dynamic message. + await self.update_available_help_channels() + log.trace("Dynamic available help message updated.") + async def init_categories(self) -> None: """Get the help category objects. Remove the cog if retrieval fails.""" log.trace("Getting the CategoryChannel objects for the help categories.") @@ -290,10 +294,6 @@ class HelpChannels(commands.Cog): log.trace("Attempting to fetch How-to-get-help dynamic message ID.") self.dynamic_message = await _caches.dynamic_message.get("message_id") - # Getting channels that need to be included in the dynamic message. - await self.update_available_help_channels() - log.trace("Dynamic available help message updated.") - await self.init_available() _stats.report_counts() -- cgit v1.2.3 From 1b1e7adaca4b116a69db06955ab2a3edb222ef52 Mon Sep 17 00:00:00 2001 From: xithrius Date: Fri, 12 Mar 2021 11:39:12 -0800 Subject: Added '_' to allowed chars, shortened embed. 
--- bot/exts/info/pypi.py | 33 ++++++++++++--------------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py index 10029aa73..2e42e7d6b 100644 --- a/bot/exts/info/pypi.py +++ b/bot/exts/info/pypi.py @@ -11,12 +11,11 @@ from bot.bot import Bot from bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput URL = "https://pypi.org/pypi/{package}/json" -FIELDS = ("author", "requires_python", "summary", "license") PYPI_ICON = "https://cdn.discordapp.com/emojis/766274397257334814.png" PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white)) -ILLEGAL_CHARACTERS = re.compile(r"[^a-zA-Z0-9-.]+") +ILLEGAL_CHARACTERS = re.compile(r"[^-_.a-zA-Z0-9]+") INVALID_INPUT_DELETE_DELAY = RedirectOutput.delete_delay log = logging.getLogger(__name__) @@ -31,16 +30,13 @@ class PyPi(Cog): @command(name="pypi", aliases=("package", "pack")) async def get_package_info(self, ctx: Context, package: str) -> None: """Provide information about a specific package from PyPI.""" - embed = Embed( - title=random.choice(NEGATIVE_REPLIES), - colour=Colours.soft_red - ) + embed = Embed(title=random.choice(NEGATIVE_REPLIES), colour=Colours.soft_red) embed.set_thumbnail(url=PYPI_ICON) error = True - if (character := re.search(ILLEGAL_CHARACTERS, package)) is not None: - embed.description = f"Illegal character passed into command: '{escape_markdown(character.group(0))}'" + if characters := re.search(ILLEGAL_CHARACTERS, package): + embed.description = f"Illegal character(s) passed into command: '{escape_markdown(characters.group(0))}'" else: async with self.bot.http_session.get(URL.format(package=package)) as response: @@ -52,22 +48,17 @@ class PyPi(Cog): info = response_json["info"] embed.title = f"{info['name']} v{info['version']}" - embed.url = info['package_url'] - embed.colour = next(PYPI_COLOURS) - for field in FIELDS: - field_data = info[field] + embed.url = info["package_url"] + embed.colour = 
next(PYPI_COLOURS) - # Field could be completely empty, in some cases can be a string with whitespaces, or None. - if field_data and not field_data.isspace(): - if '\n' in field_data and field == "license": - field_data = field_data.split('\n')[0] + summary = escape_markdown(info["summary"]) - embed.add_field( - name=field.replace("_", " ").title(), - value=escape_markdown(field_data), - inline=False, - ) + # Summary could be completely empty, or just whitespace. + if summary and not summary.isspace(): + embed.description = summary + else: + embed.description = "No summary provided." error = False -- cgit v1.2.3 From 7bc390ed20bda22cf5a2b455be6d4b15eedf47c0 Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sat, 13 Mar 2021 11:04:25 +0000 Subject: Update help channel names from chemical elements to fruit * Update and rename elements.json to fruits.json * Update _name.py * Update _cog.py --- bot/exts/help_channels/_cog.py | 2 +- bot/exts/help_channels/_name.py | 12 ++-- bot/resources/elements.json | 119 ---------------------------------------- bot/resources/foods.json | 52 ++++++++++++++++++ 4 files changed, 59 insertions(+), 126 deletions(-) delete mode 100644 bot/resources/elements.json create mode 100644 bot/resources/foods.json diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 6abf99810..1c730dce9 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -54,7 +54,7 @@ class HelpChannels(commands.Cog): * Contains channels which aren't in use * Channels are used to refill the Available category - Help channels are named after the chemical elements in `bot/resources/elements.json`. + Help channels are named after the foods in `bot/resources/foods.json`. 
""" def __init__(self, bot: Bot): diff --git a/bot/exts/help_channels/_name.py b/bot/exts/help_channels/_name.py index 728234b1e..061f855ae 100644 --- a/bot/exts/help_channels/_name.py +++ b/bot/exts/help_channels/_name.py @@ -14,11 +14,11 @@ log = logging.getLogger(__name__) def create_name_queue(*categories: discord.CategoryChannel) -> deque: """ - Return a queue of element names to use for creating new channels. + Return a queue of food names to use for creating new channels. Skip names that are already in use by channels in `categories`. """ - log.trace("Creating the chemical element name queue.") + log.trace("Creating the food name queue.") used_names = _get_used_names(*categories) @@ -31,7 +31,7 @@ def create_name_queue(*categories: discord.CategoryChannel) -> deque: def _get_names() -> t.List[str]: """ - Return a truncated list of prefixed element names. + Return a truncated list of prefixed food names. The amount of names is configured with `HelpChannels.max_total_channels`. The prefix is configured with `HelpChannels.name_prefix`. 
@@ -39,10 +39,10 @@ def _get_names() -> t.List[str]: count = constants.HelpChannels.max_total_channels prefix = constants.HelpChannels.name_prefix - log.trace(f"Getting the first {count} element names from JSON.") + log.trace(f"Getting the first {count} food names from JSON.") - with Path("bot/resources/elements.json").open(encoding="utf-8") as elements_file: - all_names = json.load(elements_file) + with Path("bot/resources/foods.json").open(encoding="utf-8") as foods_file: + all_names = json.load(foods_file) if prefix: return [prefix + name for name in all_names[:count]] diff --git a/bot/resources/elements.json b/bot/resources/elements.json deleted file mode 100644 index a3ac5b99f..000000000 --- a/bot/resources/elements.json +++ /dev/null @@ -1,119 +0,0 @@ -[ - "hydrogen", - "helium", - "lithium", - "beryllium", - "boron", - "carbon", - "nitrogen", - "oxygen", - "fluorine", - "neon", - "sodium", - "magnesium", - "aluminium", - "silicon", - "phosphorus", - "sulfur", - "chlorine", - "argon", - "potassium", - "calcium", - "scandium", - "titanium", - "vanadium", - "chromium", - "manganese", - "iron", - "cobalt", - "nickel", - "copper", - "zinc", - "gallium", - "germanium", - "arsenic", - "bromine", - "krypton", - "rubidium", - "strontium", - "yttrium", - "zirconium", - "niobium", - "molybdenum", - "technetium", - "ruthenium", - "rhodium", - "palladium", - "silver", - "cadmium", - "indium", - "tin", - "antimony", - "tellurium", - "iodine", - "xenon", - "caesium", - "barium", - "lanthanum", - "cerium", - "praseodymium", - "neodymium", - "promethium", - "samarium", - "europium", - "gadolinium", - "terbium", - "dysprosium", - "holmium", - "erbium", - "thulium", - "ytterbium", - "lutetium", - "hafnium", - "tantalum", - "tungsten", - "rhenium", - "osmium", - "iridium", - "platinum", - "gold", - "mercury", - "thallium", - "lead", - "bismuth", - "polonium", - "astatine", - "radon", - "francium", - "radium", - "actinium", - "thorium", - "protactinium", - "uranium", - 
"neptunium", - "plutonium", - "americium", - "curium", - "berkelium", - "californium", - "einsteinium", - "fermium", - "mendelevium", - "nobelium", - "lawrencium", - "rutherfordium", - "dubnium", - "seaborgium", - "bohrium", - "hassium", - "meitnerium", - "darmstadtium", - "roentgenium", - "copernicium", - "nihonium", - "flerovium", - "moscovium", - "livermorium", - "tennessine", - "oganesson" -] diff --git a/bot/resources/foods.json b/bot/resources/foods.json new file mode 100644 index 000000000..61d9ea98f --- /dev/null +++ b/bot/resources/foods.json @@ -0,0 +1,52 @@ +[ + "apple", + "avocado", + "bagel", + "banana", + "bread", + "broccoli", + "burrito", + "cake", + "candy", + "carrot", + "cheese", + "cherries", + "chestnut", + "chili", + "chocolate", + "coconut", + "coffee", + "cookie", + "corn", + "croissant", + "cupcake", + "donut", + "dumpling", + "falafel", + "grapes", + "honey", + "kiwi", + "lemon", + "lollipop", + "mango", + "mushroom", + "orange", + "pancakes", + "peanut", + "pear", + "pie", + "pineapple", + "popcorn", + "potato", + "pretzel", + "ramen", + "rice", + "salad", + "spaghetti", + "stew", + "strawberry", + "sushi", + "taco", + "tomato", + "watermelon" +] -- cgit v1.2.3 From 98b24b2fd38907807f7cd6c837687f0708afb9e1 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 16:00:41 +0100 Subject: Branding: remove current implementation Since we're planning substantial changes, it will be easier to build from scratch. 
--- bot/decorators.py | 23 +- bot/exts/backend/branding/__init__.py | 6 +- bot/exts/backend/branding/_cog.py | 556 +------------------------------ bot/exts/backend/branding/_constants.py | 48 +-- bot/exts/backend/branding/_decorators.py | 27 -- bot/exts/backend/branding/_errors.py | 2 - bot/exts/backend/branding/_seasons.py | 175 ---------- bot/exts/backend/error_handler.py | 7 +- 8 files changed, 32 insertions(+), 812 deletions(-) delete mode 100644 bot/exts/backend/branding/_decorators.py delete mode 100644 bot/exts/backend/branding/_errors.py delete mode 100644 bot/exts/backend/branding/_seasons.py diff --git a/bot/decorators.py b/bot/decorators.py index 063c8f878..0b50cc365 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -1,4 +1,5 @@ import asyncio +import functools import logging import typing as t from contextlib import suppress @@ -8,7 +9,7 @@ from discord import Member, NotFound from discord.ext import commands from discord.ext.commands import Cog, Context -from bot.constants import Channels, RedirectOutput +from bot.constants import Channels, DEBUG_MODE, RedirectOutput from bot.utils import function from bot.utils.checks import in_whitelist_check @@ -153,3 +154,23 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: await func(*args, **kwargs) return wrapper return decorator + + +def mock_in_debug(return_value: t.Any) -> t.Callable: + """ + Short-circuit function execution if in debug mode and return `return_value`. + + The original function name, and the incoming args and kwargs are DEBUG level logged + upon each call. This is useful for expensive operations, i.e. media asset uploads + that are prone to rate-limits but need to be tested extensively. 
+ """ + def decorator(func: t.Callable) -> t.Callable: + @functools.wraps(func) + async def wrapped(*args, **kwargs) -> t.Any: + """Short-circuit and log if in debug mode.""" + if DEBUG_MODE: + log.debug(f"Function {func.__name__} called with args: {args}, kwargs: {kwargs}") + return return_value + return await func(*args, **kwargs) + return wrapped + return decorator diff --git a/bot/exts/backend/branding/__init__.py b/bot/exts/backend/branding/__init__.py index 81ea3bf49..20a747b7f 100644 --- a/bot/exts/backend/branding/__init__.py +++ b/bot/exts/backend/branding/__init__.py @@ -1,7 +1,7 @@ from bot.bot import Bot -from bot.exts.backend.branding._cog import BrandingManager +from bot.exts.backend.branding._cog import Branding def setup(bot: Bot) -> None: - """Loads BrandingManager cog.""" - bot.add_cog(BrandingManager(bot)) + """Load Branding cog.""" + bot.add_cog(Branding(bot)) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 20df83a89..afe575e1a 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -1,566 +1,14 @@ -import asyncio -import itertools import logging -import random -import typing as t -from datetime import datetime, time, timedelta -import arrow -import async_timeout -import discord -from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Branding, Colours, Emojis, Guild, MODERATION_ROLES -from bot.exts.backend.branding import _constants, _decorators, _errors, _seasons log = logging.getLogger(__name__) -class GitHubFile(t.NamedTuple): - """ - Represents a remote file on GitHub. - - The `sha` hash is kept so that we can determine that a file has changed, - despite its filename remaining unchanged. 
- """ - - download_url: str - path: str - sha: str - - -def pretty_files(files: t.Iterable[GitHubFile]) -> str: - """Provide a human-friendly representation of `files`.""" - return "\n".join(file.path for file in files) - - -def time_until_midnight() -> timedelta: - """ - Determine amount of time until the next-up UTC midnight. - - The exact `midnight` moment is actually delayed to 5 seconds after, in order - to avoid potential problems due to imprecise sleep. - """ - now = datetime.utcnow() - tomorrow = now + timedelta(days=1) - midnight = datetime.combine(tomorrow, time(second=5)) - - return midnight - now - - -class BrandingManager(commands.Cog): - """ - Manages the guild's branding. - - The purpose of this cog is to help automate the synchronization of the branding - repository with the guild. It is capable of discovering assets in the repository - via GitHub's API, resolving download urls for them, and delegating - to the `bot` instance to upload them to the guild. - - BrandingManager is designed to be entirely autonomous. Its `daemon` background task awakens - once a day (see `time_until_midnight`) to detect new seasons, or to cycle icons within a single - season. The daemon can be turned on and off via the `daemon` cmd group. The value set via - its `start` and `stop` commands is persisted across sessions. If turned on, the daemon will - automatically start on the next bot start-up. Otherwise, it will wait to be started manually. - - All supported operations, e.g. 
setting seasons, applying the branding, or cycling icons, can - also be invoked manually, via the following API: - - branding list - - Show all available seasons - - branding set - - Set the cog's internal state to represent `season_name`, if it exists - - If no `season_name` is given, set chronologically current season - - This will not automatically apply the season's branding to the guild, - the cog's state can be detached from the guild - - Seasons can therefore be 'previewed' using this command - - branding info - - View detailed information about resolved assets for current season - - branding refresh - - Refresh internal state, i.e. synchronize with branding repository - - branding apply - - Apply the current internal state to the guild, i.e. upload the assets - - branding cycle - - If there are multiple available icons for current season, randomly pick - and apply the next one - - The daemon calls these methods autonomously as appropriate. The use of this cog - is locked to moderation roles. As it performs media asset uploads, it is prone to - rate-limits - the `apply` command should be used with caution. The `set` command can, - however, be used freely to 'preview' seasonal branding and check whether paths have been - resolved as appropriate. - - While the bot is in debug mode, it will 'mock' asset uploads by logging the passed - download urls and pretending that the upload was successful. Make use of this - to test this cog's behaviour. - """ - - current_season: t.Type[_seasons.SeasonBase] - - banner: t.Optional[GitHubFile] - - available_icons: t.List[GitHubFile] - remaining_icons: t.List[GitHubFile] - - days_since_cycle: t.Iterator - - daemon: t.Optional[asyncio.Task] - - # Branding configuration - branding_configuration = RedisCache() +class Branding(commands.Cog): + """Guild branding management.""" def __init__(self, bot: Bot) -> None: - """ - Assign safe default values on init. 
- - At this point, we don't have information about currently available branding. - Most of these attributes will be overwritten once the daemon connects, or once - the `refresh` command is used. - """ self.bot = bot - self.current_season = _seasons.get_current_season() - - self.banner = None - - self.available_icons = [] - self.remaining_icons = [] - - self.days_since_cycle = itertools.cycle([None]) - - self.daemon = None - self._startup_task = self.bot.loop.create_task(self._initial_start_daemon()) - - async def _initial_start_daemon(self) -> None: - """Checks is daemon active and when is, start it at cog load.""" - if await self.branding_configuration.get("daemon_active"): - self.daemon = self.bot.loop.create_task(self._daemon_func()) - - @property - def _daemon_running(self) -> bool: - """True if the daemon is currently active, False otherwise.""" - return self.daemon is not None and not self.daemon.done() - - async def _daemon_func(self) -> None: - """ - Manage all automated behaviour of the BrandingManager cog. - - Once a day, the daemon will perform the following tasks: - - Update `current_season` - - Poll GitHub API to see if the available branding for `current_season` has changed - - Update assets if changes are detected (banner, guild icon, bot avatar, bot nickname) - - Check whether it's time to cycle guild icons - - The internal loop runs once when activated, then periodically at the time - given by `time_until_midnight`. - - All method calls in the internal loop are considered safe, i.e. no errors propagate - to the daemon's loop. The daemon itself does not perform any error handling on its own. 
- """ - await self.bot.wait_until_guild_available() - - while True: - self.current_season = _seasons.get_current_season() - branding_changed = await self.refresh() - - if branding_changed: - await self.apply() - - elif next(self.days_since_cycle) == Branding.cycle_frequency: - await self.cycle() - - until_midnight = time_until_midnight() - await asyncio.sleep(until_midnight.total_seconds()) - - async def _info_embed(self) -> discord.Embed: - """Make an informative embed representing current season.""" - info_embed = discord.Embed(description=self.current_season.description, colour=self.current_season.colour) - - # If we're in a non-evergreen season, also show active months - if self.current_season is not _seasons.SeasonBase: - title = f"{self.current_season.season_name} ({', '.join(str(m) for m in self.current_season.months)})" - else: - title = self.current_season.season_name - - # Use the author field to show the season's name and avatar if available - info_embed.set_author(name=title) - - banner = self.banner.path if self.banner is not None else "Unavailable" - info_embed.add_field(name="Banner", value=banner, inline=False) - - icons = pretty_files(self.available_icons) or "Unavailable" - info_embed.add_field(name="Available icons", value=icons, inline=False) - - # Only display cycle frequency if we're actually cycling - if len(self.available_icons) > 1 and Branding.cycle_frequency: - info_embed.set_footer(text=f"Icon cycle frequency: {Branding.cycle_frequency}") - - return info_embed - - async def _reset_remaining_icons(self) -> None: - """Set `remaining_icons` to a shuffled copy of `available_icons`.""" - self.remaining_icons = random.sample(self.available_icons, k=len(self.available_icons)) - - async def _reset_days_since_cycle(self) -> None: - """ - Reset the `days_since_cycle` iterator based on configured frequency. - - If the current season only has 1 icon, or if `Branding.cycle_frequency` is falsey, - the iterator will always yield None. 
This signals that the icon shouldn't be cycled. - - Otherwise, it will yield ints in range [1, `Branding.cycle_frequency`] indefinitely. - When the iterator yields a value equal to `Branding.cycle_frequency`, it is time to cycle. - """ - if len(self.available_icons) > 1 and Branding.cycle_frequency: - sequence = range(1, Branding.cycle_frequency + 1) - else: - sequence = [None] - - self.days_since_cycle = itertools.cycle(sequence) - - async def _get_files(self, path: str, include_dirs: bool = False) -> t.Dict[str, GitHubFile]: - """ - Get files at `path` in the branding repository. - - If `include_dirs` is False (default), only returns files at `path`. - Otherwise, will return both files and directories. Never returns symlinks. - - Return dict mapping from filename to corresponding `GitHubFile` instance. - This may return an empty dict if the response status is non-200, - or if the target directory is empty. - """ - url = f"{_constants.BRANDING_URL}/{path}" - async with self.bot.http_session.get( - url, headers=_constants.HEADERS, params=_constants.PARAMS - ) as resp: - # Short-circuit if we get non-200 response - if resp.status != _constants.STATUS_OK: - log.error(f"GitHub API returned non-200 response: {resp}") - return {} - directory = await resp.json() # Directory at `path` - - allowed_types = {"file", "dir"} if include_dirs else {"file"} - return { - file["name"]: GitHubFile(file["download_url"], file["path"], file["sha"]) - for file in directory - if file["type"] in allowed_types - } - - async def refresh(self) -> bool: - """ - Synchronize available assets with branding repository. - - If the current season is not the evergreen, and lacks at least one asset, - we use the evergreen seasonal dir as fallback for missing assets. - - Finally, if neither the seasonal nor fallback branding directories contain - an asset, it will simply be ignored. - - Return True if the branding has changed. 
This will be the case when we enter - a new season, or when something changes in the current seasons's directory - in the branding repository. - """ - old_branding = (self.banner, self.available_icons) - seasonal_dir = await self._get_files(self.current_season.branding_path, include_dirs=True) - - # Only make a call to the fallback directory if there is something to be gained - branding_incomplete = any( - asset not in seasonal_dir - for asset in (_constants.FILE_BANNER, _constants.FILE_AVATAR, _constants.SERVER_ICONS) - ) - if branding_incomplete and self.current_season is not _seasons.SeasonBase: - fallback_dir = await self._get_files( - _seasons.SeasonBase.branding_path, include_dirs=True - ) - else: - fallback_dir = {} - - # Resolve assets in this directory, None is a safe value - self.banner = ( - seasonal_dir.get(_constants.FILE_BANNER) - or fallback_dir.get(_constants.FILE_BANNER) - ) - - # Now resolve server icons by making a call to the proper sub-directory - if _constants.SERVER_ICONS in seasonal_dir: - icons_dir = await self._get_files( - f"{self.current_season.branding_path}/{_constants.SERVER_ICONS}" - ) - self.available_icons = list(icons_dir.values()) - - elif _constants.SERVER_ICONS in fallback_dir: - icons_dir = await self._get_files( - f"{_seasons.SeasonBase.branding_path}/{_constants.SERVER_ICONS}" - ) - self.available_icons = list(icons_dir.values()) - - else: - self.available_icons = [] # This should never be the case, but an empty list is a safe value - - # GitHubFile instances carry a `sha` attr so this will pick up if a file changes - branding_changed = old_branding != (self.banner, self.available_icons) - - if branding_changed: - log.info(f"New branding detected (season: {self.current_season.season_name})") - await self._reset_remaining_icons() - await self._reset_days_since_cycle() - - return branding_changed - - async def cycle(self) -> bool: - """ - Apply the next-up server icon. 
- - Returns True if an icon is available and successfully gets applied, False otherwise. - """ - if not self.available_icons: - log.info("Cannot cycle: no icons for this season") - return False - - if not self.remaining_icons: - log.info("Reset & shuffle remaining icons") - await self._reset_remaining_icons() - - next_up = self.remaining_icons.pop(0) - success = await self.set_icon(next_up.download_url) - - return success - - async def apply(self) -> t.List[str]: - """ - Apply current branding to the guild and bot. - - This delegates to the bot instance to do all the work. We only provide download urls - for available assets. Assets unavailable in the branding repo will be ignored. - - Returns a list of names of all failed assets. An asset is considered failed - if it isn't found in the branding repo, or if something goes wrong while the - bot is trying to apply it. - - An empty list denotes that all assets have been applied successfully. - """ - report = {asset: False for asset in ("banner", "icon")} - - if self.banner is not None: - report["banner"] = await self.set_banner(self.banner.download_url) - - report["icon"] = await self.cycle() - - failed_assets = [asset for asset, succeeded in report.items() if not succeeded] - return failed_assets - - @commands.has_any_role(*MODERATION_ROLES) - @commands.group(name="branding") - async def branding_cmds(self, ctx: commands.Context) -> None: - """Manual branding control.""" - if not ctx.invoked_subcommand: - await ctx.send_help(ctx.command) - - @branding_cmds.command(name="list", aliases=["ls"]) - async def branding_list(self, ctx: commands.Context) -> None: - """List all available seasons and branding sources.""" - embed = discord.Embed(title="Available seasons", colour=Colours.soft_green) - - for season in _seasons.get_all_seasons(): - if season is _seasons.SeasonBase: - active_when = "always" - else: - active_when = f"in {', '.join(str(m) for m in season.months)}" - - description = ( - f"Active {active_when}\n" - 
f"Branding: {season.branding_path}" - ) - embed.add_field(name=season.season_name, value=description, inline=False) - - await ctx.send(embed=embed) - - @branding_cmds.command(name="set") - async def branding_set(self, ctx: commands.Context, *, season_name: t.Optional[str] = None) -> None: - """ - Manually set season, or reset to current if none given. - - Season search is a case-less comparison against both seasonal class name, - and its `season_name` attr. - - This only pre-loads the cog's internal state to the chosen season, but does not - automatically apply the branding. As that is an expensive operation, the `apply` - command must be called explicitly after this command finishes. - - This means that this command can be used to 'preview' a season gathering info - about its available assets, without applying them to the guild. - - If the daemon is running, it will automatically reset the season to current when - it wakes up. The season set via this command can therefore remain 'detached' from - what it should be - the daemon will make sure that it's set back properly. - """ - if season_name is None: - new_season = _seasons.get_current_season() - else: - new_season = _seasons.get_season(season_name) - if new_season is None: - raise _errors.BrandingError("No such season exists") - - if self.current_season is new_season: - raise _errors.BrandingError(f"Season {self.current_season.season_name} already active") - - self.current_season = new_season - await self.branding_refresh(ctx) - - @branding_cmds.command(name="info", aliases=["status"]) - async def branding_info(self, ctx: commands.Context) -> None: - """ - Show available assets for current season. - - This can be used to confirm that assets have been resolved properly. - When `apply` is used, it attempts to upload exactly the assets listed here. 
- """ - await ctx.send(embed=await self._info_embed()) - - @branding_cmds.command(name="refresh") - async def branding_refresh(self, ctx: commands.Context) -> None: - """Sync currently available assets with branding repository.""" - async with ctx.typing(): - await self.refresh() - await self.branding_info(ctx) - - @branding_cmds.command(name="apply") - async def branding_apply(self, ctx: commands.Context) -> None: - """ - Apply current season's branding to the guild. - - Use `info` to check which assets will be applied. Shows which assets have - failed to be applied, if any. - """ - async with ctx.typing(): - failed_assets = await self.apply() - if failed_assets: - raise _errors.BrandingError( - f"Failed to apply following assets: {', '.join(failed_assets)}" - ) - - response = discord.Embed(description=f"All assets applied {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) - - @branding_cmds.command(name="cycle") - async def branding_cycle(self, ctx: commands.Context) -> None: - """ - Apply the next-up guild icon, if multiple are available. - - The order is random. 
- """ - async with ctx.typing(): - success = await self.cycle() - if not success: - raise _errors.BrandingError("Failed to cycle icon") - - response = discord.Embed(description=f"Success {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) - - @branding_cmds.group(name="daemon", aliases=["d", "task"]) - async def daemon_group(self, ctx: commands.Context) -> None: - """Control the background daemon.""" - if not ctx.invoked_subcommand: - await ctx.send_help(ctx.command) - - @daemon_group.command(name="status") - async def daemon_status(self, ctx: commands.Context) -> None: - """Check whether daemon is currently active.""" - if self._daemon_running: - remaining_time = (arrow.utcnow() + time_until_midnight()).humanize() - response = discord.Embed(description=f"Daemon running {Emojis.ok_hand}", colour=Colours.soft_green) - response.set_footer(text=f"Next refresh {remaining_time}") - else: - response = discord.Embed(description="Daemon not running", colour=Colours.soft_red) - - await ctx.send(embed=response) - - @daemon_group.command(name="start") - async def daemon_start(self, ctx: commands.Context) -> None: - """If the daemon isn't running, start it.""" - if self._daemon_running: - raise _errors.BrandingError("Daemon already running!") - - self.daemon = self.bot.loop.create_task(self._daemon_func()) - await self.branding_configuration.set("daemon_active", True) - - response = discord.Embed(description=f"Daemon started {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) - - @daemon_group.command(name="stop") - async def daemon_stop(self, ctx: commands.Context) -> None: - """If the daemon is running, stop it.""" - if not self._daemon_running: - raise _errors.BrandingError("Daemon not running!") - - self.daemon.cancel() - await self.branding_configuration.set("daemon_active", False) - - response = discord.Embed(description=f"Daemon stopped {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) - 
- async def _fetch_image(self, url: str) -> bytes: - """Retrieve and read image from `url`.""" - log.debug(f"Getting image from: {url}") - async with self.bot.http_session.get(url) as resp: - return await resp.read() - - async def _apply_asset(self, target: discord.Guild, asset: _constants.AssetType, url: str) -> bool: - """ - Internal method for applying media assets to the guild. - - This shouldn't be called directly. The purpose of this method is mainly generic - error handling to reduce needless code repetition. - - Return True if upload was successful, False otherwise. - """ - log.info(f"Attempting to set {asset.name}: {url}") - - kwargs = {asset.value: await self._fetch_image(url)} - try: - async with async_timeout.timeout(5): - await target.edit(**kwargs) - - except asyncio.TimeoutError: - log.info("Asset upload timed out") - return False - - except discord.HTTPException as discord_error: - log.exception("Asset upload failed", exc_info=discord_error) - return False - - else: - log.info("Asset successfully applied") - return True - - @_decorators.mock_in_debug(return_value=True) - async def set_banner(self, url: str) -> bool: - """Set the guild's banner to image at `url`.""" - guild = self.bot.get_guild(Guild.id) - if guild is None: - log.info("Failed to get guild instance, aborting asset upload") - return False - - return await self._apply_asset(guild, _constants.AssetType.BANNER, url) - - @_decorators.mock_in_debug(return_value=True) - async def set_icon(self, url: str) -> bool: - """Sets the guild's icon to image at `url`.""" - guild = self.bot.get_guild(Guild.id) - if guild is None: - log.info("Failed to get guild instance, aborting asset upload") - return False - - return await self._apply_asset(guild, _constants.AssetType.SERVER_ICON, url) - - def cog_unload(self) -> None: - """Cancels startup and daemon task.""" - self._startup_task.cancel() - if self.daemon is not None: - self.daemon.cancel() diff --git a/bot/exts/backend/branding/_constants.py 
b/bot/exts/backend/branding/_constants.py index dbc7615f2..8afac6538 100644 --- a/bot/exts/backend/branding/_constants.py +++ b/bot/exts/backend/branding/_constants.py @@ -1,51 +1,11 @@ -from enum import Enum, IntEnum - from bot.constants import Keys +# Base URL for requests into the branding repository +BRANDING_URL = "https://api.github.com/repos/kwzrd/pydis-branding/contents" -class Month(IntEnum): - """All month constants for seasons.""" - - JANUARY = 1 - FEBRUARY = 2 - MARCH = 3 - APRIL = 4 - MAY = 5 - JUNE = 6 - JULY = 7 - AUGUST = 8 - SEPTEMBER = 9 - OCTOBER = 10 - NOVEMBER = 11 - DECEMBER = 12 - - def __str__(self) -> str: - return self.name.title() - - -class AssetType(Enum): - """ - Discord media assets. - - The values match exactly the kwarg keys that can be passed to `Guild.edit`. - """ - - BANNER = "banner" - SERVER_ICON = "icon" - - -STATUS_OK = 200 # HTTP status code - -FILE_BANNER = "banner.png" -FILE_AVATAR = "avatar.png" -SERVER_ICONS = "server_icons" - -BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents" - -PARAMS = {"ref": "master"} # Target branch +PARAMS = {"ref": "kwzrd/events-rework"} # Target branch HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3 -# A GitHub token is not necessary for the cog to operate, -# unauthorized requests are however limited to 60 per hour +# A GitHub token is not necessary for the cog to operate, unauthorized requests are however limited to 60 per hour if Keys.github: HEADERS["Authorization"] = f"token {Keys.github}" diff --git a/bot/exts/backend/branding/_decorators.py b/bot/exts/backend/branding/_decorators.py deleted file mode 100644 index 6a1e7e869..000000000 --- a/bot/exts/backend/branding/_decorators.py +++ /dev/null @@ -1,27 +0,0 @@ -import functools -import logging -import typing as t - -from bot.constants import DEBUG_MODE - -log = logging.getLogger(__name__) - - -def mock_in_debug(return_value: t.Any) -> t.Callable: - """ - Short-circuit function 
execution if in debug mode and return `return_value`. - - The original function name, and the incoming args and kwargs are DEBUG level logged - upon each call. This is useful for expensive operations, i.e. media asset uploads - that are prone to rate-limits but need to be tested extensively. - """ - def decorator(func: t.Callable) -> t.Callable: - @functools.wraps(func) - async def wrapped(*args, **kwargs) -> t.Any: - """Short-circuit and log if in debug mode.""" - if DEBUG_MODE: - log.debug(f"Function {func.__name__} called with args: {args}, kwargs: {kwargs}") - return return_value - return await func(*args, **kwargs) - return wrapped - return decorator diff --git a/bot/exts/backend/branding/_errors.py b/bot/exts/backend/branding/_errors.py deleted file mode 100644 index 7cd271af3..000000000 --- a/bot/exts/backend/branding/_errors.py +++ /dev/null @@ -1,2 +0,0 @@ -class BrandingError(Exception): - """Exception raised by the BrandingManager cog.""" diff --git a/bot/exts/backend/branding/_seasons.py b/bot/exts/backend/branding/_seasons.py deleted file mode 100644 index 5f6256b30..000000000 --- a/bot/exts/backend/branding/_seasons.py +++ /dev/null @@ -1,175 +0,0 @@ -import logging -import typing as t -from datetime import datetime - -from bot.constants import Colours -from bot.exts.backend.branding._constants import Month -from bot.exts.backend.branding._errors import BrandingError - -log = logging.getLogger(__name__) - - -class SeasonBase: - """ - Base for Seasonal classes. - - This serves as the off-season fallback for when no specific - seasons are active. - - Seasons are 'registered' simply by inheriting from `SeasonBase`. - We discover them by calling `__subclasses__`. - """ - - season_name: str = "Evergreen" - - colour: str = Colours.soft_green - description: str = "The default season!" 
- - branding_path: str = "seasonal/evergreen" - - months: t.Set[Month] = set(Month) - - -class Christmas(SeasonBase): - """Branding for December.""" - - season_name = "Festive season" - - colour = Colours.soft_red - description = ( - "The time is here to get into the festive spirit! No matter who you are, where you are, " - "or what beliefs you may follow, we hope every one of you enjoy this festive season!" - ) - - branding_path = "seasonal/christmas" - - months = {Month.DECEMBER} - - -class Easter(SeasonBase): - """Branding for April.""" - - season_name = "Easter" - - colour = Colours.bright_green - description = ( - "Bunny here, bunny there, bunny everywhere! Here at Python Discord, we celebrate " - "our version of Easter during the entire month of April." - ) - - branding_path = "seasonal/easter" - - months = {Month.APRIL} - - -class Halloween(SeasonBase): - """Branding for October.""" - - season_name = "Halloween" - - colour = Colours.orange - description = "Trick or treat?!" - - branding_path = "seasonal/halloween" - - months = {Month.OCTOBER} - - -class Pride(SeasonBase): - """Branding for June.""" - - season_name = "Pride" - - colour = Colours.pink - description = ( - "The month of June is a special month for us at Python Discord. It is very important to us " - "that everyone feels welcome here, no matter their origin, identity or sexuality. During the " - "month of June, while some of you are participating in Pride festivals across the world, " - "we will be celebrating individuality and commemorating the history and challenges " - "of the LGBTQ+ community with a Pride event of our own!" - ) - - branding_path = "seasonal/pride" - - months = {Month.JUNE} - - -class Valentines(SeasonBase): - """Branding for February.""" - - season_name = "Valentines" - - colour = Colours.pink - description = "Love is in the air!" 
- - branding_path = "seasonal/valentines" - - months = {Month.FEBRUARY} - - -class Wildcard(SeasonBase): - """Branding for August.""" - - season_name = "Wildcard" - - colour = Colours.purple - description = "A season full of surprises!" - - months = {Month.AUGUST} - - -def get_all_seasons() -> t.List[t.Type[SeasonBase]]: - """Give all available season classes.""" - return [SeasonBase] + SeasonBase.__subclasses__() - - -def get_current_season() -> t.Type[SeasonBase]: - """Give active season, based on current UTC month.""" - current_month = Month(datetime.utcnow().month) - - active_seasons = tuple( - season - for season in SeasonBase.__subclasses__() - if current_month in season.months - ) - - if not active_seasons: - return SeasonBase - - return active_seasons[0] - - -def get_season(name: str) -> t.Optional[t.Type[SeasonBase]]: - """ - Give season such that its class name or its `season_name` attr match `name` (caseless). - - If no such season exists, return None. - """ - name = name.casefold() - - for season in get_all_seasons(): - matches = (season.__name__.casefold(), season.season_name.casefold()) - - if name in matches: - return season - - -def _validate_season_overlap() -> None: - """ - Raise BrandingError if there are any colliding seasons. - - This serves as a local test to ensure that seasons haven't been misconfigured. 
- """ - month_to_season = {} - - for season in SeasonBase.__subclasses__(): - for month in season.months: - colliding_season = month_to_season.get(month) - - if colliding_season: - raise BrandingError(f"Season {season} collides with {colliding_season} in {month.name}") - else: - month_to_season[month] = season - - -_validate_season_overlap() diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py index 9cb54cdab..76ab7dfc2 100644 --- a/bot/exts/backend/error_handler.py +++ b/bot/exts/backend/error_handler.py @@ -1,7 +1,6 @@ import contextlib import difflib import logging -import random import typing as t from discord import Embed @@ -10,10 +9,9 @@ from sentry_sdk import push_scope from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Colours, ERROR_REPLIES, Icons, MODERATION_ROLES +from bot.constants import Colours, Icons, MODERATION_ROLES from bot.converters import TagNameConverter from bot.errors import InvalidInfractedUser, LockedResourceError -from bot.exts.backend.branding._errors import BrandingError from bot.utils.checks import InWhitelistCheckFailure log = logging.getLogger(__name__) @@ -79,9 +77,6 @@ class ErrorHandler(Cog): await self.handle_api_error(ctx, e.original) elif isinstance(e.original, LockedResourceError): await ctx.send(f"{e.original} Please wait for it to finish and try again later.") - elif isinstance(e.original, BrandingError): - await ctx.send(embed=self._get_error_embed(random.choice(ERROR_REPLIES), str(e.original))) - return elif isinstance(e.original, InvalidInfractedUser): await ctx.send(f"Cannot infract that user. 
{e.original.reason}") else: -- cgit v1.2.3 From 65f3dd35ec7eca6160691c5cc339ba9462941c47 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 20:27:52 +0100 Subject: Branding: initiate repository abstraction --- bot/exts/backend/branding/_cog.py | 3 +++ bot/exts/backend/branding/_repository.py | 12 ++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 bot/exts/backend/branding/_repository.py diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index afe575e1a..cef17a614 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -3,6 +3,7 @@ import logging from discord.ext import commands from bot.bot import Bot +from bot.exts.backend.branding._repository import BrandingRepository log = logging.getLogger(__name__) @@ -11,4 +12,6 @@ class Branding(commands.Cog): """Guild branding management.""" def __init__(self, bot: Bot) -> None: + """Instantiate repository abstraction.""" self.bot = bot + self.repository = BrandingRepository(bot) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py new file mode 100644 index 000000000..de47fcd36 --- /dev/null +++ b/bot/exts/backend/branding/_repository.py @@ -0,0 +1,12 @@ +import logging + +from bot.bot import Bot + +log = logging.getLogger(__name__) + + +class BrandingRepository: + """Abstraction exposing the branding repository via convenient methods.""" + + def __init__(self, bot: Bot) -> None: + self.bot = bot -- cgit v1.2.3 From bcddc5cdaaa021af3fef9b0b3c2b30d11960083a Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 20:37:19 +0100 Subject: Branding: migrate constants Constants will only be used in one place and there's not enough of them to warrant a separate module. 
--- bot/exts/backend/branding/_constants.py | 11 ----------- bot/exts/backend/branding/_repository.py | 11 +++++++++++ 2 files changed, 11 insertions(+), 11 deletions(-) delete mode 100644 bot/exts/backend/branding/_constants.py diff --git a/bot/exts/backend/branding/_constants.py b/bot/exts/backend/branding/_constants.py deleted file mode 100644 index 8afac6538..000000000 --- a/bot/exts/backend/branding/_constants.py +++ /dev/null @@ -1,11 +0,0 @@ -from bot.constants import Keys - -# Base URL for requests into the branding repository -BRANDING_URL = "https://api.github.com/repos/kwzrd/pydis-branding/contents" - -PARAMS = {"ref": "kwzrd/events-rework"} # Target branch -HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3 - -# A GitHub token is not necessary for the cog to operate, unauthorized requests are however limited to 60 per hour -if Keys.github: - HEADERS["Authorization"] = f"token {Keys.github}" diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index de47fcd36..3bdb632f8 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -1,6 +1,17 @@ import logging from bot.bot import Bot +from bot.constants import Keys + +# Base URL for requests into the branding repository +BRANDING_URL = "https://api.github.com/repos/kwzrd/pydis-branding/contents" + +PARAMS = {"ref": "kwzrd/events-rework"} # Target branch +HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3 + +# A GitHub token is not necessary for the cog to operate, unauthorized requests are however limited to 60 per hour +if Keys.github: + HEADERS["Authorization"] = f"token {Keys.github}" log = logging.getLogger(__name__) -- cgit v1.2.3 From ff8193ed19c2c5e5106fa2afee264f5ea700275f Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 20:47:07 +0100 Subject: Branding: add HTTP fetch helper methods --- bot/exts/backend/branding/_repository.py | 54 
++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 3bdb632f8..bf38fccad 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -1,4 +1,5 @@ import logging +import typing as t from bot.bot import Bot from bot.constants import Keys @@ -16,8 +17,61 @@ if Keys.github: log = logging.getLogger(__name__) +class RemoteObject: + """ + Represent a remote file or directory on GitHub. + + The annotations match keys in the response JSON that we're interested in. + """ + + name: str # Filename + path: str # Path from repo root + type: str # Either 'file' or 'dir' + download_url: str + + def __init__(self, dictionary: t.Dict[str, t.Any]) -> None: + """Initialize by grabbing annotated attributes from `dictionary`.""" + for annotation in self.__annotations__: + setattr(self, annotation, dictionary[annotation]) + + class BrandingRepository: """Abstraction exposing the branding repository via convenient methods.""" def __init__(self, bot: Bot) -> None: self.bot = bot + + async def fetch_directory(self, path: str, types: t.Container[str] = ("file", "dir")) -> t.Dict[str, RemoteObject]: + """ + Fetch directory found at `path` in the branding repository. + + The directory will be represented by a mapping from file or sub-directory names to their corresponding + instances of `RemoteObject`. Passing a custom `types` value allows only getting files or directories. + + If the request fails, returns an empty dictionary. 
+ """ + full_url = f"{BRANDING_URL}/{path}" + log.debug(f"Fetching directory from branding repository: {full_url}") + + async with self.bot.http_session.get(full_url, params=PARAMS, headers=HEADERS) as response: + if response.status == 200: + json_directory = await response.json() + else: + log.warning(f"Received non-200 response status: {response.status}") + return {} + + return {file["name"]: RemoteObject(file) for file in json_directory if file["type"] in types} + + async def fetch_file(self, file: RemoteObject) -> t.Optional[bytes]: + """ + Fetch `file` using its download URL. + + Returns the file as bytes unless the request fails, in which case None is given. + """ + log.debug(f"Fetching file from branding repository: {file.download_url}") + + async with self.bot.http_session.get(file.download_url, params=PARAMS, headers=HEADERS) as response: + if response.status == 200: + return await response.read() + else: + log.warning(f"Received non-200 response status: {response.status}") -- cgit v1.2.3 From cb3b80788bde2aba280de5370ee78abcaa39f613 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 22:41:25 +0100 Subject: Branding: define event construction methodology --- bot/errors.py | 6 +++ bot/exts/backend/branding/_repository.py | 84 ++++++++++++++++++++++++++++++++ 2 files changed, 90 insertions(+) diff --git a/bot/errors.py b/bot/errors.py index ab0adcd42..3544c6320 100644 --- a/bot/errors.py +++ b/bot/errors.py @@ -35,3 +35,9 @@ class InvalidInfractedUser(Exception): self.reason = reason super().__init__(reason) + + +class BrandingMisconfiguration(RuntimeError): + """Raised by the Branding cog when a misconfigured event is encountered.""" + + pass diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index bf38fccad..9d32fdfb1 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -1,8 +1,12 @@ import logging import typing as t +from datetime import date, datetime 
+ +import frontmatter from bot.bot import Bot from bot.constants import Keys +from bot.errors import BrandingMisconfiguration # Base URL for requests into the branding repository BRANDING_URL = "https://api.github.com/repos/kwzrd/pydis-branding/contents" @@ -14,6 +18,13 @@ HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3 if Keys.github: HEADERS["Authorization"] = f"token {Keys.github}" +# Since event periods are year-agnostic, we parse them into `datetime` objects with a manually inserted year +# Please note that this is intentionally a leap year in order to allow Feb 29 to be valid +ARBITRARY_YEAR = 2020 + +# Format used to parse date strings after we inject `ARBITRARY_YEAR` at the end +DATE_FMT = "%B %d %Y" # Ex: July 10 2020 + log = logging.getLogger(__name__) @@ -35,6 +46,23 @@ class RemoteObject: setattr(self, annotation, dictionary[annotation]) +class MetaFile(t.NamedTuple): + """Composition of attributes defined in a 'meta.md' file.""" + + is_fallback: bool + start_date: t.Optional[date] + end_date: t.Optional[date] + description: str # Markdown event description + + +class Event(t.NamedTuple): + """Represent an event defined in the branding repository.""" + + banner: RemoteObject + icons: t.List[RemoteObject] + meta: MetaFile + + class BrandingRepository: """Abstraction exposing the branding repository via convenient methods.""" @@ -75,3 +103,59 @@ class BrandingRepository: return await response.read() else: log.warning(f"Received non-200 response status: {response.status}") + + async def parse_meta_file(self, raw_file: bytes) -> MetaFile: + """ + Parse a 'meta.md' file from raw bytes. + + The caller is responsible for handling errors caused by misconfiguration. 
+ """ + attrs, description = frontmatter.parse(raw_file) # Library automatically decodes using UTF-8 + + if not description: + raise BrandingMisconfiguration("No description found in 'meta.md'!") + + if attrs.get("fallback", False): + return MetaFile(is_fallback=True, start_date=None, end_date=None, description=description) + + start_date_raw = attrs.get("start_date") + end_date_raw = attrs.get("end_date") + + if None in (start_date_raw, end_date_raw): + raise BrandingMisconfiguration("Non-fallback event doesn't have start and end dates defined!") + + # We extend the configured month & day with an arbitrary leap year to allow a `datetime` repr to exist + # This may raise errors if configured in a wrong format ~ we let the caller handle such cases + start_date = datetime.strptime(f"{start_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date() + end_date = datetime.strptime(f"{end_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date() + + return MetaFile(is_fallback=False, start_date=start_date, end_date=end_date, description=description) + + async def construct_event(self, directory: RemoteObject) -> Event: + """ + Construct an `Event` instance from an event `directory`. + + The caller is responsible for handling errors caused by misconfiguration. 
+ """ + contents = await self.fetch_directory(directory.path) + + missing_assets = {"meta.md", "banner.png", "server_icons"} - contents.keys() + + if missing_assets: + raise BrandingMisconfiguration(f"Directory is missing following assets: {missing_assets}") + + server_icons = await self.fetch_directory(contents["server_icons"].path, types=("file",)) + + if server_icons is None: + raise BrandingMisconfiguration("Failed to fetch server icons!") + if len(server_icons) == 0: + raise BrandingMisconfiguration("Found no server icons!") + + meta_bytes = await self.fetch_file(contents["meta.md"]) + + if meta_bytes is None: + raise BrandingMisconfiguration("Failed to fetch 'meta.md' file!") + + meta_file = await self.parse_meta_file(meta_bytes) + + return Event(contents["banner.png"], list(server_icons.values()), meta_file) -- cgit v1.2.3 From 305046418b4cb66b59d4592a02ad2613e75718aa Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 23:06:00 +0100 Subject: Branding: add event getters These methods form the API to the repository abstraction. --- bot/exts/backend/branding/_repository.py | 51 ++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 9d32fdfb1..20e287504 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -159,3 +159,54 @@ class BrandingRepository: meta_file = await self.parse_meta_file(meta_bytes) return Event(contents["banner.png"], list(server_icons.values()), meta_file) + + async def get_events(self) -> t.List[Event]: + """ + Discover available events in the branding repository. + + Misconfigured events are skipped, the return value may therefore not contain a representation of each + directory in the repository. May return an empty list in the catastrophic case. 
+ """ + log.debug("Discovering events in branding repository") + + event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files + instances: t.List[Event] = [] + + for event_directory in event_directories.values(): + log.trace(f"Attempting to construct event from directory: {event_directory.path}") + try: + instance = await self.construct_event(event_directory) + except Exception as exc: + log.warning(f"Could not construct event: {exc}") + else: + instances.append(instance) + + log.trace(f"Found {len(instances)} correctly configured events") + return instances + + async def get_current_event(self) -> t.Optional[Event]: + """ + Get the currently active event, or the fallback event. + + Returns None in the case that no event is active, and no fallback event is found. + """ + utc_now = datetime.utcnow() + log.debug(f"Finding active event for: {utc_now}") + + # As all events exist in the arbitrary year, we construct a separate object for the purposes of comparison + lookup_now = date(year=ARBITRARY_YEAR, month=utc_now.month, day=utc_now.day) + + events = await self.get_events() + + for event in events: + meta = event.meta + if not meta.is_fallback and (meta.start_date <= lookup_now <= meta.end_date): + return event + + log.debug("No active event found, looking for fallback") + + for event in events: + if event.meta.is_fallback: + return event + + log.warning("No event is currently active and no fallback event was found!") -- cgit v1.2.3 From d831086f0f5b21138283e6165f3efe0c42ba2530 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 23:23:44 +0100 Subject: Branding: make event instances aware of their location This allows us to add a neat string representation. 
--- bot/exts/backend/branding/_repository.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 20e287504..e9d44417f 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -58,9 +58,13 @@ class MetaFile(t.NamedTuple): class Event(t.NamedTuple): """Represent an event defined in the branding repository.""" + path: str # Path from repo root where event lives + meta: MetaFile banner: RemoteObject icons: t.List[RemoteObject] - meta: MetaFile + + def __str__(self) -> str: + return f"" class BrandingRepository: @@ -158,7 +162,7 @@ class BrandingRepository: meta_file = await self.parse_meta_file(meta_bytes) - return Event(contents["banner.png"], list(server_icons.values()), meta_file) + return Event(directory.path, meta_file, contents["banner.png"], list(server_icons.values())) async def get_events(self) -> t.List[Event]: """ -- cgit v1.2.3 From d3f3ba9d999091bae1d455afa9f1b94eea47f778 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 9 Mar 2021 23:42:51 +0100 Subject: Branding: correctly annotate optional attribute --- bot/exts/backend/branding/_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index e9d44417f..133231968 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -38,7 +38,7 @@ class RemoteObject: name: str # Filename path: str # Path from repo root type: str # Either 'file' or 'dir' - download_url: str + download_url: t.Optional[str] # If type is 'dir', this is None! 
def __init__(self, dictionary: t.Dict[str, t.Any]) -> None: """Initialize by grabbing annotated attributes from `dictionary`.""" -- cgit v1.2.3 From 0810b76e448f8b13b760eb29c080a7d576959821 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Wed, 10 Mar 2021 22:03:59 +0100 Subject: Branding: do not require 'RemoteObject' instance to fetch file --- bot/exts/backend/branding/_repository.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 133231968..1a6b13c8b 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -94,15 +94,15 @@ class BrandingRepository: return {file["name"]: RemoteObject(file) for file in json_directory if file["type"] in types} - async def fetch_file(self, file: RemoteObject) -> t.Optional[bytes]: + async def fetch_file(self, download_url: str) -> t.Optional[bytes]: """ - Fetch `file` using its download URL. + Fetch file from `download_url`. Returns the file as bytes unless the request fails, in which case None is given. 
""" - log.debug(f"Fetching file from branding repository: {file.download_url}") + log.debug(f"Fetching file from branding repository: {download_url}") - async with self.bot.http_session.get(file.download_url, params=PARAMS, headers=HEADERS) as response: + async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response: if response.status == 200: return await response.read() else: @@ -155,7 +155,7 @@ class BrandingRepository: if len(server_icons) == 0: raise BrandingMisconfiguration("Found no server icons!") - meta_bytes = await self.fetch_file(contents["meta.md"]) + meta_bytes = await self.fetch_file(contents["meta.md"].download_url) if meta_bytes is None: raise BrandingMisconfiguration("Failed to fetch 'meta.md' file!") -- cgit v1.2.3 From 53db28b0a2c126efd1ead201d9053eac81d95758 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Wed, 10 Mar 2021 22:27:30 +0100 Subject: Branding: implement asset application logic --- bot/exts/backend/branding/_cog.py | 51 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index cef17a614..79106d694 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -1,13 +1,30 @@ +import asyncio import logging +from enum import Enum +import async_timeout +import discord from discord.ext import commands from bot.bot import Bot +from bot.constants import Guild +from bot.decorators import mock_in_debug from bot.exts.backend.branding._repository import BrandingRepository log = logging.getLogger(__name__) +class AssetType(Enum): + """ + Recognised Discord guild asset types. + + The value of each member corresponds exactly to a kwarg that can be passed to `Guild.edit`. 
+ """ + + BANNER = "banner" + ICON = "icon" + + class Branding(commands.Cog): """Guild branding management.""" @@ -15,3 +32,37 @@ class Branding(commands.Cog): """Instantiate repository abstraction.""" self.bot = bot self.repository = BrandingRepository(bot) + + # region: Internal utility + + @mock_in_debug(return_value=None) + async def apply_asset(self, asset_type: AssetType, download_url: str) -> None: + """ + Download asset from `download_url` and apply it to PyDis as `asset_type`. + + This function is mocked in the development environment in order to prevent API spam during testing. + Decorator should be temporarily removed in order to test internal methodology. + """ + log.info(f"Applying {asset_type.value} asset to the guild") + + file = await self.repository.fetch_file(download_url) + + if file is None: + log.error(f"Failed to download {asset_type.value} from branding repository!") + return + + await self.bot.wait_until_guild_available() + pydis: discord.Guild = self.bot.get_guild(Guild.id) + + timeout = 10 # Seconds + try: + with async_timeout.timeout(timeout): + await pydis.edit(**{asset_type.value: file}) + except discord.HTTPException as http_exc: + log.error(f"Asset upload to Discord failed: {http_exc}") + except asyncio.TimeoutError: + log.error(f"Asset upload to Discord timed out after {timeout} seconds!") + else: + log.debug("Asset uploaded successfully!") + + # endregion -- cgit v1.2.3 From 83dfb2a7a0389e53d61a30bd93aba2c749a4aa48 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Thu, 11 Mar 2021 21:51:13 +0100 Subject: Branding: expose SHA on remote objects --- bot/exts/backend/branding/_repository.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 1a6b13c8b..ef292619e 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -35,6 +35,7 @@ class RemoteObject: The annotations match keys in the response JSON that we're 
interested in. """ + sha: str # Hash helps us detect asset change name: str # Filename path: str # Path from repo root type: str # Either 'file' or 'dir' -- cgit v1.2.3 From ac4399a4b19dfa5ae0e9856c8df546d00a7d473e Mon Sep 17 00:00:00 2001 From: kwzrd Date: Thu, 11 Mar 2021 22:09:25 +0100 Subject: Branding: implement internal utility This adds the core logic of branding management. In comparison with the previous version, we now maintain all state in Redis, which allows the bot to seamlessly restart without losing any information. The 'send_info_embed' function is intentionally implemented with the consideration of allowing users to invoke it on-demand. It always reads information from the cache, even if the caller could pass a 'MetaFile' instance. So while this may look needlessly indirect right now, it should begin to make sense once the command API is implemented. --- bot/exts/backend/branding/_cog.py | 176 +++++++++++++++++++++++++++++++++++++- 1 file changed, 174 insertions(+), 2 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 79106d694..ddd91b5f8 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -1,15 +1,19 @@ import asyncio import logging +import random +import typing as t +from datetime import datetime, timedelta from enum import Enum import async_timeout import discord +from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Guild +from bot.constants import Branding as BrandingConfig, Channels, Guild from bot.decorators import mock_in_debug -from bot.exts.backend.branding._repository import BrandingRepository +from bot.exts.backend.branding._repository import BrandingRepository, Event, RemoteObject log = logging.getLogger(__name__) @@ -25,9 +29,28 @@ class AssetType(Enum): ICON = "icon" +def compound_hash(objects: t.Iterable[RemoteObject]) -> str: + """Compound hashes are cached to check for 
change in any of the member `objects`.""" + return "-".join(item.sha for item in objects) + + class Branding(commands.Cog): """Guild branding management.""" + # RedisCache[ + # "event_path": Path from root in the branding repo (str) + # "event_description": Markdown description (str) + # "event_duration": Human-readable date range or 'Fallback' (str) + # "banner_hash": Hash of the last applied banner (str) + # "icons_hash": Compound hash of icons in rotation (str) + # "last_rotation_timestamp": POSIX timestamp (float) + # ] + cache_information = RedisCache() + + # Cache holding icons in current rotation ~ the keys are download URLs (str) and the values are integers + # corresponding to the amount of times each icon has been used in the current rotation + cache_icons = RedisCache() + def __init__(self, bot: Bot) -> None: """Instantiate repository abstraction.""" self.bot = bot @@ -65,4 +88,153 @@ class Branding(commands.Cog): else: log.debug("Asset uploaded successfully!") + async def apply_banner(self, banner: RemoteObject) -> None: + """ + Apply `banner` to the guild and cache its hash. + + Banners should always be applied via this method in order to ensure that the last hash is cached. + """ + await self.apply_asset(AssetType.BANNER, banner.download_url) + await self.cache_information.set("banner_hash", banner.sha) + + async def rotate_icons(self) -> None: + """ + Choose and apply the next-up icon in rotation. + + We keep track of the amount of times each icon has been used. The values in `cache_icons` can be understood + to be iteration IDs. When an icon is chosen & applied, we bump its count, pushing it into the next iteration. + + Once the current iteration (lowest count in the cache) depletes, we move onto the next iteration. + + In the case that there is only 1 icon in the rotation and has already been applied, do nothing. 
+ """ + log.debug("Rotating icons") + + state = await self.cache_icons.to_dict() + log.trace(f"Total icons in rotation: {len(state)}") + + if len(state) == 1 and 1 in state.values(): + log.debug("Aborting icon rotation: only 1 icon is available and has already been applied") + return + + current_iteration = min(state.values()) # Choose iteration to draw from + options = [download_url for download_url, times_used in state.items() if times_used == current_iteration] + + log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}") + next_icon = random.choice(options) + + await self.apply_asset(AssetType.ICON, next_icon) + await self.cache_icons.increment(next_icon) # Push the icon into the next iteration + + timestamp = datetime.utcnow().timestamp() + await self.cache_information.set("last_rotation_timestamp", timestamp) + + async def maybe_rotate_icons(self) -> None: + """ + Call `rotate_icons` if the configured amount of time has passed since last rotation. + + We offset the calculated time difference into the future in order to avoid off-by-a-little-bit errors. + Because there is work to be done before the timestamp is read and written, the next read will likely + commence slightly under 24 hours after the last write. + """ + log.debug("Checking if icons should rotate") + + last_rotation_timestamp = await self.cache_information.get("last_rotation_timestamp") + + if last_rotation_timestamp is None: # Maiden case ~ never rotated + await self.rotate_icons() + + last_rotation = datetime.fromtimestamp(last_rotation_timestamp) + difference = (datetime.utcnow() - last_rotation) + timedelta(minutes=5) + + log.trace(f"Icons last rotated at {last_rotation} (difference: {difference})") + + if difference.days >= BrandingConfig.cycle_frequency: + await self.rotate_icons() + + async def initiate_icon_rotation(self, available_icons: t.List[RemoteObject]) -> None: + """ + Set up a new icon rotation. 
+ + This function should be called whenever the set of `available_icons` changes. This is generally the case + when we enter a new event, but potentially also when the assets of an on-going event change. In such cases, + a reset of `cache_icons` is necessary, because it contains download URLs which may have gotten stale. + """ + log.debug("Initiating new icon rotation") + + await self.cache_icons.clear() + + new_state = {icon.download_url: 0 for icon in available_icons} + await self.cache_icons.update(new_state) + + log.trace(f"Icon rotation initiated for {len(new_state)} icons") + + await self.rotate_icons() + await self.cache_information.set("icons_hash", compound_hash(available_icons)) + + async def send_info_embed(self, channel_id: int) -> None: + """ + Send the currently cached event description to `channel_id`. + + This function is called when entering a new event with the destination being #changelog. However, it can + also be invoked on-demand by users. + + To support either case, we read information about the current event from `cache_information`. The caller + is therefore responsible for making sure that the cache is up-to-date before calling this function. + """ + log.debug(f"Sending event information event to channel id: {channel_id}") + + await self.bot.wait_until_guild_available() + channel: t.Optional[discord.TextChannel] = self.bot.get_channel(channel_id) + + if channel is None: + log.warning(f"Cannot send event information: channel {channel_id} not found!") + return + + log.debug(f"Destination channel: #{channel.name}") + + embed = discord.Embed( + description=await self.cache_information.get("event_description"), + colour=discord.Colour.blurple(), + ) + embed.set_footer(text=await self.cache_information.get("event_duration")) + + await channel.send(embed=embed) + + async def enter_event(self, event: Event) -> None: + """ + Enter `event` and update information cache. 
+ + From the outside, entering a new event is as simple as applying its branding to the guild and dispatching + a notification to #changelog. + + However, internally we cache information to ensure that we: + * Remember which event we're currently in across restarts + * Provide an on-demand information embed without re-querying the branding repository + + An event change should always be handled via this function, as it ensures that the cache is populated. + """ + log.debug(f"Entering new event: {event.path}") + + await self.apply_banner(event.banner) # Only one asset ~ apply directly + await self.initiate_icon_rotation(event.icons) # Extra layer of abstraction to handle multiple assets + + # Cache event identity to avoid re-entry in case of restart + await self.cache_information.set("event_path", event.path) + + # The following values are only stored for the purpose of presenting them to the users + if event.meta.is_fallback: + event_duration = "Fallback" + else: + fmt = "%B %d" # Ex: August 23 + start_date = event.meta.start_date.strftime(fmt) + end_date = event.meta.end_date.strftime(fmt) + event_duration = f"{start_date} - {end_date}" + + await self.cache_information.set("event_duration", event_duration) + await self.cache_information.set("event_description", event.meta.description) + + # Notify guild of new event ~ this reads the information that we cached above! 
+ await self.send_info_embed(Channels.change_log) + # endregion -- cgit v1.2.3 From 8c05cf3a2202fd06ba15636f998c222694e6085e Mon Sep 17 00:00:00 2001 From: kwzrd Date: Fri, 12 Mar 2021 16:52:49 +0100 Subject: Branding: implement daemon --- bot/exts/backend/branding/_cog.py | 112 +++++++++++++++++++++++++++++++++++++- 1 file changed, 109 insertions(+), 3 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index ddd91b5f8..4387e68b4 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -2,13 +2,13 @@ import asyncio import logging import random import typing as t -from datetime import datetime, timedelta +from datetime import datetime, time, timedelta from enum import Enum import async_timeout import discord from async_rediscache import RedisCache -from discord.ext import commands +from discord.ext import commands, tasks from bot.bot import Bot from bot.constants import Branding as BrandingConfig, Channels, Guild @@ -38,6 +38,7 @@ class Branding(commands.Cog): """Guild branding management.""" # RedisCache[ + # "daemon_active": If True, daemon auto-starts; controlled via commands (bool) # "event_path": Path from root in the branding repo (str) # "event_description": Markdown description (str) # "event_duration": Human-readable date range or 'Fallback' (str) @@ -52,10 +53,12 @@ class Branding(commands.Cog): cache_icons = RedisCache() def __init__(self, bot: Bot) -> None: - """Instantiate repository abstraction.""" + """Instantiate repository abstraction & allow daemon to start.""" self.bot = bot self.repository = BrandingRepository(bot) + self.bot.loop.create_task(self.maybe_start_daemon()) # Start depending on cache + # region: Internal utility @mock_in_debug(return_value=None) @@ -238,3 +241,106 @@ class Branding(commands.Cog): await self.send_info_embed(Channels.change_log) # endregion + # region: Daemon + + async def maybe_start_daemon(self) -> None: + """ + Start the daemon depending on 
cache state. + + The daemon will only start if it's been previously explicitly enabled via a command. + """ + log.debug("Checking whether daemon is enabled") + + should_begin: t.Optional[bool] = await self.cache_information.get("daemon_active") # None if never set! + + if should_begin: + self.daemon_main.start() + + async def cog_unload(self) -> None: + """ + Cancel the daemon in case of cog unload. + + This is **not** done automatically! The daemon otherwise remains active in the background. + """ + log.debug("Cog unload: cancelling daemon") + + self.daemon_main.cancel() + + @tasks.loop(hours=24) + async def daemon_main(self) -> None: + """ + Periodically synchronise guild & caches with branding repository. + + This function executes every 24 hours at midnight. We pull the currently active event from the branding + repository and check whether it matches the currently active event. If not, we apply the new event. + + However, it is also possible that an event's assets change as it's active. To account for such cases, + we check the banner & icons hashes against the currently cached values. If there is a mismatch, the + specific asset is re-applied. + + As such, the guild should always remain synchronised with the branding repository. However, the #changelog + notification is only sent in the case of entering a new event ~ no change in an on-going event will trigger + a new notification to be sent. 
+ """ + log.debug("Daemon awakens: checking current event") + + new_event = await self.repository.get_current_event() + + if new_event is None: + log.warning("Failed to get current event from the branding repository, daemon will do nothing!") + return + + if new_event.path != await self.cache_information.get("event_path"): + log.debug("New event detected!") + await self.enter_event(new_event) + return + + log.debug("Event has not changed, checking for change in assets") + + if new_event.banner.sha != await self.cache_information.get("banner_hash"): + log.debug("Detected same-event banner change!") + await self.apply_banner(new_event.banner) + + if compound_hash(new_event.icons) != await self.cache_information.get("icons_hash"): + log.debug("Detected same-event icon change!") + await self.initiate_icon_rotation(new_event.icons) + else: + await self.maybe_rotate_icons() + + @daemon_main.before_loop + async def daemon_before(self) -> None: + """ + Wait until the next-up UTC midnight before letting `daemon_main` begin. + + This function allows the daemon to keep a consistent schedule across restarts. + + We check for a special case in which the cog's cache is empty. This indicates that we have never entered + an event (on first start-up), or that there was a cache loss. In either case, the current event gets + applied immediately, to avoid leaving the cog in an empty state. 
+ """ + log.debug("Calculating time for daemon to sleep before first awakening") + + current_event = await self.cache_information.get("event_path") + + if current_event is None: # Maiden case ~ first start or cache loss + log.debug("Applying event immediately as cache is empty (indicating maiden case)") + + event = await self.repository.get_current_event() + + if event is None: + log.warning("Failed to fetch event ~ cache will remain empty!") + else: + await self.enter_event(event) + + now = datetime.utcnow() + + # The actual midnight moment is offset into the future in order to prevent issues with imprecise sleep + tomorrow = now + timedelta(days=1) + midnight = datetime.combine(tomorrow, time(minute=1)) + + sleep_secs = (midnight - now).total_seconds() + + log.debug(f"Sleeping {sleep_secs} seconds before next-up midnight at {midnight}") + await asyncio.sleep(sleep_secs) + + # endregion -- cgit v1.2.3 From 64469430d0a1a4ed6ca41c696622e5d6e46d52a4 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 12:11:29 +0100 Subject: Branding: gate sync via helper function Sync make also be invoked with a command; avoid logic duplication. --- bot/exts/backend/branding/_cog.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 4387e68b4..dce2b7bc0 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -240,6 +240,23 @@ class Branding(commands.Cog): # Notify guild of new event ~ this reads the information that we cached above! await self.send_info_embed(Channels.change_log) + async def synchronise(self) -> None: + """ + Fetch the current event and delegate to `enter_event`. + + This is a convenience wrapper to force synchronisation either via a command, or when the daemon starts + with an empty cache. It is generally only used in a recovery scenario. 
In the usual case, the daemon + already has an `Event` instance and can pass it to `enter_event` directly. + """ + log.debug("Synchronise: fetching current event") + + event = await self.repository.get_current_event() + + if event is None: + log.error("Failed to fetch event ~ cannot synchronise!") + else: + await self.enter_event(event) + # endregion # region: Daemon @@ -323,14 +340,8 @@ class Branding(commands.Cog): current_event = await self.cache_information.get("event_path") if current_event is None: # Maiden case ~ first start or cache loss - log.debug("Applying event immediately as cache is empty (indicating maiden case)") - - event = await self.repository.get_current_event() - - if event is None: - log.warning("Failed to fetch event ~ cache will remain empty!") - else: - await self.enter_event(event) + log.debug("Event cache is empty (indicating maiden case), invoking synchronisation") + await self.synchronise() now = datetime.utcnow() -- cgit v1.2.3 From 9b3e5d4ec761c15c1fc6a261bbe8796c6ab0a50d Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 13:38:56 +0100 Subject: Branding: implement command interface --- bot/exts/backend/branding/_cog.py | 86 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 85 insertions(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index dce2b7bc0..0fd694bca 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -11,7 +11,7 @@ from async_rediscache import RedisCache from discord.ext import commands, tasks from bot.bot import Bot -from bot.constants import Branding as BrandingConfig, Channels, Guild +from bot.constants import Branding as BrandingConfig, Channels, Colours, Guild from bot.decorators import mock_in_debug from bot.exts.backend.branding._repository import BrandingRepository, Event, RemoteObject @@ -34,6 +34,18 @@ def compound_hash(objects: t.Iterable[RemoteObject]) -> str: return "-".join(item.sha for item in objects) 
+def make_embed(title: str, description: str, *, success: bool) -> discord.Embed: + """ + Construct simple response embed. + + If `success` is True, use green colour, otherwise red. + + For both `title` and `description`, empty string are valid values ~ fields will be empty. + """ + colour = Colours.soft_green if success else Colours.soft_red + return discord.Embed(title=title, description=description, colour=colour) + + class Branding(commands.Cog): """Guild branding management.""" @@ -355,3 +367,75 @@ class Branding(commands.Cog): await asyncio.sleep(sleep_secs) # endregion + # region: Command interface (branding) + + @commands.group(name="branding") + async def branding_group(self, ctx: commands.Context) -> None: + """Control the branding cog.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @branding_group.command(name="about") + async def branding_about_cmd(self, ctx: commands.Context) -> None: + """Show the current event description.""" + await self.send_info_embed(ctx.channel.id) + + @branding_group.command(name="sync") + async def branding_sync_cmd(self, ctx: commands.Context) -> None: + """Force branding synchronisation.""" + async with ctx.typing(): + await self.synchronise() + + resp = make_embed( + "Synchronisation complete", + "If something doesn't look right, check log for errors.", + success=True, + ) + await ctx.send(embed=resp) + + # endregion + # region: Command interface (branding daemon) + + @branding_group.group(name="daemon", aliases=("d",)) + async def branding_daemon_group(self, ctx: commands.Context) -> None: + """Control the branding cog's daemon.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @branding_daemon_group.command(name="enable", aliases=("start", "on")) + async def branding_daemon_enable_cmd(self, ctx: commands.Context) -> None: + """Enable the branding daemon.""" + await self.cache_information.set("daemon_active", True) + + if self.daemon_main.is_running(): + resp = 
make_embed("Daemon is already enabled!", "", success=False) + else: + self.daemon_main.start() + resp = make_embed("Daemon enabled!", "It will now automatically awaken on start-up.", success=True) + + await ctx.send(embed=resp) + + @branding_daemon_group.command(name="disable", aliases=("stop", "off")) + async def branding_daemon_disable_cmd(self, ctx: commands.Context) -> None: + """Disable the branding daemon.""" + await self.cache_information.set("daemon_active", False) + + if self.daemon_main.is_running(): + self.daemon_main.cancel() + resp = make_embed("Daemon disabled!", "It will not awaken on start-up.", success=True) + else: + resp = make_embed("Daemon is already disabled!", "", success=False) + + await ctx.send(embed=resp) + + @branding_daemon_group.command(name="status") + async def branding_daemon_status_cmd(self, ctx: commands.Context) -> None: + """Check whether the daemon is currently enabled.""" + if self.daemon_main.is_running(): + resp = make_embed("Daemon is enabled", "Use `branding daemon disable` to stop.", success=True) + else: + resp = make_embed("Daemon is disabled", "Use `branding daemon enable` to start.", success=False) + + await ctx.send(embed=resp) + + # endregion -- cgit v1.2.3 From c7ecc27bb576f7fb717259702e8f7212eef92b29 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 13:48:07 +0100 Subject: Branding: lock commands to mods+ where necessary --- bot/exts/backend/branding/_cog.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 0fd694bca..61ae46923 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -11,7 +11,7 @@ from async_rediscache import RedisCache from discord.ext import commands, tasks from bot.bot import Bot -from bot.constants import Branding as BrandingConfig, Channels, Colours, Guild +from bot.constants import Branding as BrandingConfig, Channels, Colours, Guild, MODERATION_ROLES 
from bot.decorators import mock_in_debug from bot.exts.backend.branding._repository import BrandingRepository, Event, RemoteObject @@ -380,6 +380,7 @@ class Branding(commands.Cog): """Show the current event description.""" await self.send_info_embed(ctx.channel.id) + @commands.has_any_role(*MODERATION_ROLES) @branding_group.command(name="sync") async def branding_sync_cmd(self, ctx: commands.Context) -> None: """Force branding synchronisation.""" @@ -396,6 +397,7 @@ class Branding(commands.Cog): # endregion # region: Command interface (branding daemon) + @commands.has_any_role(*MODERATION_ROLES) @branding_group.group(name="daemon", aliases=("d",)) async def branding_daemon_group(self, ctx: commands.Context) -> None: """Control the branding cog's daemon.""" -- cgit v1.2.3 From 88111ccab42e6ff686cb1dc4ff2416f0b409c14a Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 14:08:11 +0100 Subject: Branding: add missing early exit --- bot/exts/backend/branding/_cog.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 61ae46923..f5bad21ac 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -158,6 +158,7 @@ class Branding(commands.Cog): if last_rotation_timestamp is None: # Maiden case ~ never rotated await self.rotate_icons() + return last_rotation = datetime.fromtimestamp(last_rotation_timestamp) difference = (datetime.utcnow() - last_rotation) + timedelta(minutes=5) -- cgit v1.2.3 From 4b5af57b4ed4eac18bf3c368f99e848e10a33cab Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Sat, 13 Mar 2021 16:20:47 +0200 Subject: Use log instead of erroneous self.log --- bot/exts/recruitment/talentpool/_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 56b51925e..b84499d98 100644 --- a/bot/exts/recruitment/talentpool/_review.py 
+++ b/bot/exts/recruitment/talentpool/_review.py @@ -276,7 +276,7 @@ class Reviewer: nomination = await self.bot.api_client.get(f"{self._pool.api_endpoint}/{nomination_id}") except ResponseCodeError as e: if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't find nomination with id {nomination_id}") + log.trace(f"Nomination API 404: Can't find nomination with id {nomination_id}") await ctx.send(f"❌ Can't find a nomination with id `{nomination_id}`") return None else: -- cgit v1.2.3 From 4324b3f6ac80bbcbd2eef80303bf7caf1dfa8cca Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Sat, 13 Mar 2021 16:27:28 +0200 Subject: Apply requested grammar and style changes. --- bot/exts/recruitment/talentpool/_cog.py | 11 ++++++++--- bot/exts/recruitment/talentpool/_review.py | 10 +++++----- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index 070a4fd83..7b21dcd53 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -1,4 +1,3 @@ - import logging import textwrap from collections import ChainMap @@ -47,7 +46,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @nomination_group.command(name='watched', aliases=('all', 'list'), root_aliases=("nominees",)) @has_any_role(*MODERATION_ROLES) async def watched_command( - self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + self, + ctx: Context, + oldest_first: bool = False, + update_cache: bool = True ) -> None: """ Shows the users that are currently being monitored in the talent pool. 
@@ -60,7 +62,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) async def list_watched_users( - self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + self, + ctx: Context, + oldest_first: bool = False, + update_cache: bool = True ) -> None: """ Gives an overview of the nominated users list. diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index b84499d98..682a32918 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -278,16 +278,16 @@ class Reviewer: if e.response.status == 404: log.trace(f"Nomination API 404: Can't find nomination with id {nomination_id}") await ctx.send(f"❌ Can't find a nomination with id `{nomination_id}`") - return None + return else: raise if nomination["reviewed"]: await ctx.send("❌ This nomination was already reviewed, but here's a cookie 🍪") - return None + return elif not nomination["active"]: await ctx.send("❌ This nomination is inactive") - return None + return await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) if nomination["user"] in self._review_scheduler: @@ -302,7 +302,7 @@ class Reviewer: Cancels the review of the nominee with ID user_id. It's important to note that this applies only until reschedule_reviews is called again. - To permenantly cancel someone's review, either remove them from the pool, or use mark_reviewed. + To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed. """ self._review_scheduler.cancel(user_id) @@ -311,6 +311,6 @@ class Reviewer: Cancels all reviews. It's important to note that this applies only until reschedule_reviews is called again. - To permenantly cancel someone's review, either remove them from the pool, or use mark_reviewed. 
+ To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed. """ self._review_scheduler.cancel_all() -- cgit v1.2.3 From 80e037772150148c8aee9f49fd34a834e778b6c2 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sat, 13 Mar 2021 18:08:54 +0100 Subject: Add leads to the constants --- bot/constants.py | 2 ++ config-default.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/bot/constants.py b/bot/constants.py index 394d59a73..3918dfdc7 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -486,6 +486,8 @@ class Roles(metaclass=YAMLGetter): voice_verified: int admins: int + domain_leads: int + project_leads: int core_developers: int devops: int helpers: int diff --git a/config-default.yml b/config-default.yml index 3dbc7bd6b..a961df85b 100644 --- a/config-default.yml +++ b/config-default.yml @@ -262,6 +262,8 @@ guild: # Staff admins: &ADMINS_ROLE 267628507062992896 + domain_leads: 807415650778742785 + project_leads: 807415650778742785 core_developers: 587606783669829632 devops: 409416496733880320 helpers: &HELPERS_ROLE 267630620367257601 -- cgit v1.2.3 From 6724505ea01cf1bb345a8272643da00a6f28f272 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sat, 13 Mar 2021 18:22:44 +0100 Subject: Allow the !poll command to leads --- bot/exts/utils/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index a5d6f69b9..c45f73b88 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -9,7 +9,7 @@ from discord.ext.commands import BadArgument, Cog, Context, clean_content, comma from discord.utils import snowflake_time from bot.bot import Bot -from bot.constants import Channels, MODERATION_ROLES, STAFF_ROLES +from bot.constants import Channels, MODERATION_ROLES, STAFF_ROLES, Roles from bot.converters import Snowflake from bot.decorators import in_whitelist from bot.pagination import LinePaginator @@ -175,7 +175,7 @@ class Utils(Cog): await 
ctx.send(embed=embed) @command(aliases=("poll",)) - @has_any_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES, Roles.project_leads, Roles.domain_leads) async def vote(self, ctx: Context, title: clean_content(fix_channel_mentions=True), *options: str) -> None: """ Build a quick voting poll with matching reactions with the provided options. -- cgit v1.2.3 From 35e538ab9ab8ac0d479825aa8da9a42f885820a5 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 16:18:03 +0100 Subject: Branding: extract duration string in helper function --- bot/exts/backend/branding/_cog.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index f5bad21ac..332d4ad58 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -46,6 +46,22 @@ def make_embed(title: str, description: str, *, success: bool) -> discord.Embed: return discord.Embed(title=title, description=description, colour=colour) +def extract_event_duration(event: Event) -> str: + """ + Extract a human-readable, year-agnostic duration string from `event`. + + In the case that `event` is a fallback event, resolves to 'Fallback'. 
+ """ + if event.meta.is_fallback: + return "Fallback" + + fmt = "%B %d" # Ex: August 23 + start_date = event.meta.start_date.strftime(fmt) + end_date = event.meta.end_date.strftime(fmt) + + return f"{start_date} - {end_date}" + + class Branding(commands.Cog): """Guild branding management.""" @@ -239,15 +255,7 @@ class Branding(commands.Cog): await self.cache_information.set("event_path", event.path) # The following values are only stored for the purpose of presenting them to the users - if event.meta.is_fallback: - event_duration = "Fallback" - else: - fmt = "%B %d" # Ex: August 23 - start_date = event.meta.start_date.strftime(fmt) - end_date = event.meta.end_date.strftime(fmt) - event_duration = f"{start_date} - {end_date}" - - await self.cache_information.set("event_duration", event_duration) + await self.cache_information.set("event_duration", extract_event_duration(event)) await self.cache_information.set("event_description", event.meta.description) # Notify guild of new event ~ this reads the information that we cached above! -- cgit v1.2.3 From 81e48983c7408e6a8dd4c6131eb5633be7c53825 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 17:58:25 +0100 Subject: Branding: cache all available events This is a prequel to adding a calendar command. To avoid re-querying the branding repo on command invocation, event information will be cached whenever we make requests. The command can then simply get an up-to-date event schedule from the cache, with the option of forcing an update via the 'populate_cache_events' function. Since we cannot easily serialize entire 'Event' instances, we simply store what's needed - the event name, and its duration. The author has verified that the cache maintains order; in this case chronological order based on event start date. 
--- bot/exts/backend/branding/_cog.py | 49 +++++++++++++++++++++++++++++--- bot/exts/backend/branding/_repository.py | 18 +++++++----- 2 files changed, 56 insertions(+), 11 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 332d4ad58..50ae11b11 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -62,6 +62,18 @@ def extract_event_duration(event: Event) -> str: return f"{start_date} - {end_date}" +def extract_event_name(event: Event) -> str: + """ + Extract title-cased event name from the path of `event`. + + An event with a path of 'events/black_history_month' will resolve to 'Black History Month'. + """ + name = event.path.split("/")[-1] # Inner-most directory name + words = name.split("_") # Words from snake case + + return " ".join(word.title() for word in words) + + class Branding(commands.Cog): """Guild branding management.""" @@ -80,6 +92,10 @@ class Branding(commands.Cog): # corresponding to the amount of times each icon has been used in the current rotation cache_icons = RedisCache() + # Cache holding all available event names & their durations; this is cached by the daemon and read by + # the calendar command with the intention of preventing API spam; doesn't contain the fallback event + cache_events = RedisCache() + def __init__(self, bot: Bot) -> None: """Instantiate repository abstraction & allow daemon to start.""" self.bot = bot @@ -271,12 +287,35 @@ class Branding(commands.Cog): """ log.debug("Synchronise: fetching current event") - event = await self.repository.get_current_event() + current_event, available_events = await self.repository.get_current_event() - if event is None: + await self.populate_cache_events(available_events) + + if current_event is None: log.error("Failed to fetch event ~ cannot synchronise!") else: - await self.enter_event(event) + await self.enter_event(current_event) + + async def populate_cache_events(self, events: t.List[Event]) -> None: 
+ """ + Clear `cache_events` and re-populate with names and durations of `events`. + + For each event, we store its name and duration string. This is the information presented to users in the + calendar command. If a format change is needed, it has to be done here. + + The cache does not store the fallback event, as it is not shown in the calendar. + """ + log.debug(f"Populating events cache with {len(events)} events") + + await self.cache_events.clear() + + no_fallback = [event for event in events if not event.meta.is_fallback] + chronological_events = sorted(no_fallback, key=lambda event_: event_.meta.start_date) + + await self.cache_events.update({ + extract_event_name(event): extract_event_duration(event) + for event in chronological_events + }) # endregion # region: Daemon @@ -322,7 +361,9 @@ class Branding(commands.Cog): """ log.debug("Daemon awakens: checking current event") - new_event = await self.repository.get_current_event() + new_event, available_events = await self.repository.get_current_event() + + await self.populate_cache_events(available_events) if new_event is None: log.warning("Failed to get current event from the branding repository, daemon will do nothing!") diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index ef292619e..b1368c59e 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -189,11 +189,14 @@ class BrandingRepository: log.trace(f"Found {len(instances)} correctly configured events") return instances - async def get_current_event(self) -> t.Optional[Event]: + async def get_current_event(self) -> t.Tuple[t.Optional[Event], t.List[Event]]: """ Get the currently active event, or the fallback event. - Returns None in the case that no event is active, and no fallback event is found. + The second return value is a list of all available events. The caller may discard it, if not needed. 
+ Returning all events alongside the current one prevents having to query the API twice in some cases. + + The current event may be None in the case that no event is active, and no fallback event is found. """ utc_now = datetime.utcnow() log.debug(f"Finding active event for: {utc_now}") @@ -201,17 +204,18 @@ class BrandingRepository: # As all events exist in the arbitrary year, we construct a separate object for the purposes of comparison lookup_now = date(year=ARBITRARY_YEAR, month=utc_now.month, day=utc_now.day) - events = await self.get_events() + available_events = await self.get_events() - for event in events: + for event in available_events: meta = event.meta if not meta.is_fallback and (meta.start_date <= lookup_now <= meta.end_date): - return event + return event, available_events log.debug("No active event found, looking for fallback") - for event in events: + for event in available_events: if event.meta.is_fallback: - return event + return event, available_events log.warning("No event is currently active and no fallback event was found!") + return None, available_events -- cgit v1.2.3 From c047af35cbd32104f94f8619b1e861393a2ad4ce Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 18:09:13 +0100 Subject: Branding: add calendar command group --- bot/exts/backend/branding/_cog.py | 63 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 50ae11b11..0640ca243 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -444,6 +444,69 @@ class Branding(commands.Cog): ) await ctx.send(embed=resp) + # endregion + # region: Command interface (branding calendar) + + @branding_group.group(name="calendar", aliases=("schedule",)) + async def branding_calendar_group(self, ctx: commands.Context) -> None: + """ + Show the current event calendar. 
+ + We draw event information from `cache_events` and use each key-value pair to create a field in the response + embed. As such, we do not need to query the API to get event information. The cache is automatically + re-populated by the daemon whenever it makes a request. A moderator+ can also explicitly request a cache + refresh using the 'refresh' subcommand. + + Due to Discord limitations, we only show up to 25 events. This is entirely sufficient at the time of writing. + In the case that we find ourselves with more than 25 events, a warning log will alert core devs. + + In the future, we may be interested in a field-paginating solution. + """ + if ctx.invoked_subcommand: + # If you're wondering why this works: when the 'refresh' subcommand eventually re-invokes + # this group, the attribute will be automatically set to None by the framework + return + + available_events = await self.cache_events.to_dict() + log.debug(f"Found {len(available_events)} cached events available for calendar view") + + if not available_events: + resp = make_embed("No events found!", "Cache may be empty, try `branding calendar refresh`.", success=False) + await ctx.send(embed=resp) + return + + embed = discord.Embed(title="Current event calendar", colour=discord.Colour.blurple()) + + # Because a Discord embed can only contain up to 25 fields, we only show the first 25 + first_25 = list(available_events.items())[:25] + + if len(first_25) != len(available_events): # Alert core devs that a paginating solution is now necessary + log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25!") + + for name, duration in first_25: + embed.add_field(name=name, value=duration) + + embed.set_footer(text="Otherwise, the fallback season is used.") + + await ctx.send(embed=embed) + + @commands.has_any_role(*MODERATION_ROLES) + @branding_calendar_group.command(name="refresh") + async def branding_calendar_refresh_cmd(self, ctx: commands.Context) -> None: + """ + 
Refresh event cache and show current event calendar. + + Supplementary subcommand allowing force-refreshing the event cache. Implemented as a subcommand because + unlike the supergroup, it requires moderator privileges. + """ + log.debug("Performing command-requested event cache refresh") + + async with ctx.typing(): + available_events = await self.repository.get_events() + await self.populate_cache_events(available_events) + + await ctx.invoke(self.branding_calendar_group) + # endregion # region: Command interface (branding daemon) -- cgit v1.2.3 From a394c42f32f07c2932e641a48a51e16f949f36ee Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sat, 13 Mar 2021 19:51:04 +0000 Subject: master => main --- .github/workflows/build.yml | 2 +- .github/workflows/deploy.yml | 2 +- .github/workflows/lint-test.yml | 2 +- .github/workflows/sentry_release.yml | 4 ++-- CONTRIBUTING.md | 6 +++--- README.md | 14 +++++++------- bot/exts/backend/branding/_constants.py | 2 +- bot/exts/backend/logging.py | 2 +- bot/exts/info/source.py | 2 +- config-default.yml | 8 ++++---- 10 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6c97e8784..e6826e09b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,7 +4,7 @@ on: workflow_run: workflows: ["Lint & Test"] branches: - - master + - main types: - completed diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 0caf02308..8b809b777 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -4,7 +4,7 @@ on: workflow_run: workflows: ["Build"] branches: - - master + - main types: - completed diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index 6fa8e8333..95bed2e14 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -3,7 +3,7 @@ name: Lint & Test on: push: branches: - - master + - main pull_request: diff --git 
a/.github/workflows/sentry_release.yml b/.github/workflows/sentry_release.yml index b8d92e90a..f6a1e1f0e 100644 --- a/.github/workflows/sentry_release.yml +++ b/.github/workflows/sentry_release.yml @@ -3,14 +3,14 @@ name: Create Sentry release on: push: branches: - - master + - main jobs: create_sentry_release: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@master + uses: actions/checkout@main - name: Create a Sentry.io release uses: tclindner/sentry-releases-action@v1.2.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index be591d17e..addab32ff 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ # Contributing to one of Our Projects -Our projects are open-source and are automatically deployed whenever commits are pushed to the `master` branch on each repository, so we've created a set of guidelines in order to keep everything clean and in working order. +Our projects are open-source and are automatically deployed whenever commits are pushed to the `main` branch on each repository, so we've created a set of guidelines in order to keep everything clean and in working order. Note that contributions may be rejected on the basis of a contributor failing to follow these guidelines. @@ -8,7 +8,7 @@ Note that contributions may be rejected on the basis of a contributor failing to 1. **No force-pushes** or modifying the Git history in any way. 2. If you have direct access to the repository, **create a branch for your changes** and create a pull request for that branch. If not, create a branch on a fork of the repository and create a pull request from there. - * It's common practice for a repository to reject direct pushes to `master`, so make branching a habit! + * It's common practice for a repository to reject direct pushes to `main`, so make branching a habit! * If PRing from your own fork, **ensure that "Allow edits from maintainers" is checked**. 
This gives permission for maintainers to commit changes directly to your fork, speeding up the review process. 3. **Adhere to the prevailing code style**, which we enforce using [`flake8`](http://flake8.pycqa.org/en/latest/index.html) and [`pre-commit`](https://pre-commit.com/). * Run `flake8` and `pre-commit` against your code [**before** you push it](https://soundcloud.com/lemonsaurusrex/lint-before-you-push). Your commit will be rejected by the build server if it fails to lint. @@ -18,7 +18,7 @@ Note that contributions may be rejected on the basis of a contributor failing to * Avoid making minor commits for fixing typos or linting errors. Since you've already set up a `pre-commit` hook to run the linting pipeline before a commit, you shouldn't be committing linting issues anyway. * A more in-depth guide to writing great commit messages can be found in Chris Beam's [*How to Write a Git Commit Message*](https://chris.beams.io/posts/git-commit/) 5. **Avoid frequent pushes to the main repository**. This goes for PRs opened against your fork as well. Our test build pipelines are triggered every time a push to the repository (or PR) is made. Try to batch your commits until you've finished working for that session, or you've reached a point where collaborators need your commits to continue their own work. This also provides you the opportunity to amend commits for minor changes rather than having to commit them on their own because you've already pushed. - * This includes merging master into your branch. Try to leave merging from master for after your PR passes review; a maintainer will bring your PR up to date before merging. Exceptions to this include: resolving merge conflicts, needing something that was pushed to master for your branch, or something was pushed to master that could potentionally affect the functionality of what you're writing. + * This includes merging main into your branch. 
Try to leave merging from main for after your PR passes review; a maintainer will bring your PR up to date before merging. Exceptions to this include: resolving merge conflicts, needing something that was pushed to main for your branch, or something was pushed to main that could potentially affect the functionality of what you're writing. 6. **Don't fight the framework**. Every framework has its flaws, but the frameworks we've picked out have been carefully chosen for their particular merits. If you can avoid it, please resist reimplementing swathes of framework logic - the work has already been done for you! 7. If someone is working on an issue or pull request, **do not open your own pull request for the same task**. Instead, collaborate with the author(s) of the existing pull request. Duplicate PRs opened without communicating with the other author(s) and/or PyDis staff will be closed. Communication is key, and there's no point in two separate implementations of the same thing. * One option is to fork the other contributor's repository and submit your changes to their branch with your own pull request. We suggest following these guidelines when interacting with their repository as well. diff --git a/README.md b/README.md index ac45e6340..9df905dc8 100644 --- a/README.md +++ b/README.md @@ -12,11 +12,11 @@ and other tools to help keep the server running like a well-oiled machine. Read the [Contributing Guide](https://pythondiscord.com/pages/contributing/bot/) on our website if you're interested in helping out. 
-[1]: https://github.com/python-discord/bot/workflows/Lint%20&%20Test/badge.svg?branch=master -[2]: https://github.com/python-discord/bot/actions?query=workflow%3A%22Lint+%26+Test%22+branch%3Amaster -[3]: https://github.com/python-discord/bot/workflows/Build/badge.svg?branch=master -[4]: https://github.com/python-discord/bot/actions?query=workflow%3ABuild+branch%3Amaster -[5]: https://github.com/python-discord/bot/workflows/Deploy/badge.svg?branch=master -[6]: https://github.com/python-discord/bot/actions?query=workflow%3ADeploy+branch%3Amaster -[7]: https://raw.githubusercontent.com/python-discord/branding/master/logos/badge/badge_github.svg +[1]: https://github.com/python-discord/bot/workflows/Lint%20&%20Test/badge.svg?branch=main +[2]: https://github.com/python-discord/bot/actions?query=workflow%3A%22Lint+%26+Test%22+branch%3Amain +[3]: https://github.com/python-discord/bot/workflows/Build/badge.svg?branch=main +[4]: https://github.com/python-discord/bot/actions?query=workflow%3ABuild+branch%3Amain +[5]: https://github.com/python-discord/bot/workflows/Deploy/badge.svg?branch=main +[6]: https://github.com/python-discord/bot/actions?query=workflow%3ADeploy+branch%3Amain +[7]: https://raw.githubusercontent.com/python-discord/branding/main/logos/badge/badge_github.svg [8]: https://discord.gg/python diff --git a/bot/exts/backend/branding/_constants.py b/bot/exts/backend/branding/_constants.py index dbc7615f2..ca8e8c5f5 100644 --- a/bot/exts/backend/branding/_constants.py +++ b/bot/exts/backend/branding/_constants.py @@ -42,7 +42,7 @@ SERVER_ICONS = "server_icons" BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents" -PARAMS = {"ref": "master"} # Target branch +PARAMS = {"ref": "main"} # Target branch HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3 # A GitHub token is not necessary for the cog to operate, diff --git a/bot/exts/backend/logging.py b/bot/exts/backend/logging.py index 94fa2b139..823f14ea4 100644 --- 
a/bot/exts/backend/logging.py +++ b/bot/exts/backend/logging.py @@ -29,7 +29,7 @@ class Logging(Cog): url="https://github.com/python-discord/bot", icon_url=( "https://raw.githubusercontent.com/" - "python-discord/branding/master/logos/logo_circle/logo_circle_large.png" + "python-discord/branding/main/logos/logo_circle/logo_circle_large.png" ) ) diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py index 7b41352d4..49e74f204 100644 --- a/bot/exts/info/source.py +++ b/bot/exts/info/source.py @@ -97,7 +97,7 @@ class BotSource(commands.Cog): else: file_location = Path(filename).relative_to(Path.cwd()).as_posix() - url = f"{URLs.github_bot_repo}/blob/master/{file_location}{lines_extension}" + url = f"{URLs.github_bot_repo}/blob/main/{file_location}{lines_extension}" return url, file_location, first_line_no or None diff --git a/config-default.yml b/config-default.yml index 3dbc7bd6b..49d7f84ac 100644 --- a/config-default.yml +++ b/config-default.yml @@ -89,8 +89,8 @@ style: filtering: "https://cdn.discordapp.com/emojis/472472638594482195.png" - green_checkmark: "https://raw.githubusercontent.com/python-discord/branding/master/icons/checkmark/green-checkmark-dist.png" - green_questionmark: "https://raw.githubusercontent.com/python-discord/branding/master/icons/checkmark/green-question-mark-dist.png" + green_checkmark: "https://raw.githubusercontent.com/python-discord/branding/main/icons/checkmark/green-checkmark-dist.png" + green_questionmark: "https://raw.githubusercontent.com/python-discord/branding/main/icons/checkmark/green-question-mark-dist.png" guild_update: "https://cdn.discordapp.com/emojis/469954765141442561.png" hash_blurple: "https://cdn.discordapp.com/emojis/469950142942806017.png" @@ -360,8 +360,8 @@ urls: discord_api: &DISCORD_API "https://discordapp.com/api/v7/" discord_invite_api: !JOIN [*DISCORD_API, "invites"] - # Misc URLs - bot_avatar: "https://raw.githubusercontent.com/discord-python/branding/master/logos/logo_circle/logo_circle.png" + # 
Misc URLs + bot_avatar: "https://raw.githubusercontent.com/python-discord/branding/main/logos/logo_circle/logo_circle.png" github_bot_repo: "https://github.com/python-discord/bot" -- cgit v1.2.3 From eff113e5c93474d295079b92a1c28650b51f8e7a Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 13 Mar 2021 21:56:37 +0100 Subject: Branding: replace ugly lambda with 'attrgetter' --- bot/exts/backend/branding/_cog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 0640ca243..df0ef2a5f 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -4,6 +4,7 @@ import random import typing as t from datetime import datetime, time, timedelta from enum import Enum +from operator import attrgetter import async_timeout import discord @@ -310,7 +311,7 @@ class Branding(commands.Cog): await self.cache_events.clear() no_fallback = [event for event in events if not event.meta.is_fallback] - chronological_events = sorted(no_fallback, key=lambda event_: event_.meta.start_date) + chronological_events = sorted(no_fallback, key=attrgetter("meta.start_date")) await self.cache_events.update({ extract_event_name(event): extract_event_duration(event)
--- .gitattributes | 1 + .pre-commit-config.yaml | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..176a458f9 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +* text=auto diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1597592ca..52500a282 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,8 +7,6 @@ repos: - id: check-yaml args: [--unsafe] # Required due to custom constructors (e.g. !ENV) - id: end-of-file-fixer - - id: mixed-line-ending - args: [--fix=lf] - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - repo: https://github.com/pre-commit/pygrep-hooks -- cgit v1.2.3 From a48e79a5ee108b47914029513a4d5cd1fa4b72a6 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 00:07:14 +0100 Subject: Branding: do not call 'rotate_icons' from rotation init It makes more sense for the init and the rotation to be separate operations. In a subsequent commit, the separation of responsibility will allow the `rotate_icons` function to have a meaningful return value. --- bot/exts/backend/branding/_cog.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index df0ef2a5f..cd645fba4 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -208,6 +208,8 @@ class Branding(commands.Cog): This function should be called whenever the set of `available_icons` changes. This is generally the case when we enter a new event, but potentially also when the assets of an on-going event change. In such cases, a reset of `cache_icons` is necessary, because it contains download URLs which may have gotten stale. + + This function does not upload a new icon! 
""" log.debug("Initiating new icon rotation") @@ -218,7 +220,6 @@ class Branding(commands.Cog): log.trace(f"Icon rotation initiated for {len(new_state)} icons") - await self.rotate_icons() await self.cache_information.set("icons_hash", compound_hash(available_icons)) async def send_info_embed(self, channel_id: int) -> None: @@ -266,7 +267,9 @@ class Branding(commands.Cog): log.debug(f"Entering new event: {event.path}") await self.apply_banner(event.banner) # Only one asset ~ apply directly - await self.initiate_icon_rotation(event.icons) # Extra layer of abstraction to handle multiple assets + + await self.initiate_icon_rotation(event.icons) # Prepare a new rotation + await self.rotate_icons() # Apply an icon from the new rotation # Cache event identity to avoid re-entry in case of restart await self.cache_information.set("event_path", event.path) @@ -384,6 +387,7 @@ class Branding(commands.Cog): if compound_hash(new_event.icons) != await self.cache_information.get("icons_hash"): log.debug("Detected same-event icon change!") await self.initiate_icon_rotation(new_event.icons) + await self.rotate_icons() else: await self.maybe_rotate_icons() -- cgit v1.2.3 From d7bd0c348d6dd8be18174bb67ecf210362070b20 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 00:32:30 +0100 Subject: Branding: propagate success-indicating boolean from 'apply_asset' The sync command will now be able to use present this information to the invoking user. This commit also prevents the cached banner & icon hash from being overwritten in the case of asset upload failure. As a result, the daemon will attempt to re-apply the assets the following day. 
--- bot/exts/backend/branding/_cog.py | 62 +++++++++++++++++++++++++++------------ 1 file changed, 43 insertions(+), 19 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index cd645fba4..dd19832af 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -106,13 +106,15 @@ class Branding(commands.Cog): # region: Internal utility - @mock_in_debug(return_value=None) - async def apply_asset(self, asset_type: AssetType, download_url: str) -> None: + @mock_in_debug(return_value=True) + async def apply_asset(self, asset_type: AssetType, download_url: str) -> bool: """ Download asset from `download_url` and apply it to PyDis as `asset_type`. This function is mocked in the development environment in order to prevent API spam during testing. Decorator should be temporarily removed in order to test internal methodology. + + Returns a boolean indicating whether the application was successful. """ log.info(f"Applying {asset_type.value} asset to the guild") @@ -120,7 +122,7 @@ class Branding(commands.Cog): if file is None: log.error(f"Failed to download {asset_type.value} from branding repository!") - return + return False await self.bot.wait_until_guild_available() pydis: discord.Guild = self.bot.get_guild(Guild.id) @@ -131,21 +133,30 @@ class Branding(commands.Cog): await pydis.edit(**{asset_type.value: file}) except discord.HTTPException as http_exc: log.error(f"Asset upload to Discord failed: {http_exc}") + return False except asyncio.TimeoutError: log.error(f"Asset upload to Discord timed out after {timeout} seconds!") + return False else: log.debug("Asset uploaded successfully!") + return True - async def apply_banner(self, banner: RemoteObject) -> None: + async def apply_banner(self, banner: RemoteObject) -> bool: """ - Apply `banner` to the guild and cache its hash. + Apply `banner` to the guild and cache its hash if successful. 
Banners should always be applied via this method in order to ensure that the last hash is cached. + + Returns a boolean indicating whether the application was successful. """ - await self.apply_asset(AssetType.BANNER, banner.download_url) - await self.cache_information.set("banner_hash", banner.sha) + success = await self.apply_asset(AssetType.BANNER, banner.download_url) + + if success: + await self.cache_information.set("banner_hash", banner.sha) - async def rotate_icons(self) -> None: + return success + + async def rotate_icons(self) -> bool: """ Choose and apply the next-up icon in rotation. @@ -155,6 +166,8 @@ class Branding(commands.Cog): Once the current iteration (lowest count in the cache) depletes, we move onto the next iteration. In the case that there is only 1 icon in the rotation and has already been applied, do nothing. + + Returns a boolean indicating whether a new icon was applied successfully. """ log.debug("Rotating icons") @@ -163,7 +176,7 @@ class Branding(commands.Cog): if len(state) == 1 and 1 in state.values(): log.debug("Aborting icon rotation: only 1 icon is available and has already been applied") - return + return False current_iteration = min(state.values()) # Choose iteration to draw from options = [download_url for download_url, times_used in state.items() if times_used == current_iteration] @@ -171,11 +184,15 @@ class Branding(commands.Cog): log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}") next_icon = random.choice(options) - await self.apply_asset(AssetType.ICON, next_icon) - await self.cache_icons.increment(next_icon) # Push the icon into the next iteration + success = await self.apply_asset(AssetType.ICON, next_icon) - timestamp = datetime.utcnow().timestamp() - await self.cache_information.set("last_rotation_timestamp", timestamp) + if success: + await self.cache_icons.increment(next_icon) # Push the icon into the next iteration + + timestamp = datetime.utcnow().timestamp() + await 
self.cache_information.set("last_rotation_timestamp", timestamp) + + return success async def maybe_rotate_icons(self) -> None: """ @@ -251,7 +268,7 @@ class Branding(commands.Cog): await channel.send(embed=embed) - async def enter_event(self, event: Event) -> None: + async def enter_event(self, event: Event) -> t.Tuple[bool, bool]: """ Enter `event` and update information cache. @@ -263,13 +280,15 @@ class Branding(commands.Cog): * Provide an on-demand information embed without re-querying the branding repository An event change should always be handled via this function, as it ensures that the cache is populated. + + Returns a 2-tuple indicating whether the banner, and the icon, were applied successfully. """ log.debug(f"Entering new event: {event.path}") - await self.apply_banner(event.banner) # Only one asset ~ apply directly + banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly await self.initiate_icon_rotation(event.icons) # Prepare a new rotation - await self.rotate_icons() # Apply an icon from the new rotation + icon_success = await self.rotate_icons() # Apply an icon from the new rotation # Cache event identity to avoid re-entry in case of restart await self.cache_information.set("event_path", event.path) @@ -281,13 +300,17 @@ class Branding(commands.Cog): # Notify guild of new event ~ this reads the information that we cached above! await self.send_info_embed(Channels.change_log) - async def synchronise(self) -> None: + return banner_success, icon_success + + async def synchronise(self) -> t.Tuple[bool, bool]: """ Fetch the current event and delegate to `enter_event`. This is a convenience wrapper to force synchronisation either via a command, or when the daemon starts with an empty cache. It is generally only used in a recovery scenario. In the usual case, the daemon already has an `Event` instance and can pass it to `enter_event` directly. 
+ + Returns a 2-tuple indicating whether the banner, and the icon, were applied successfully. """ log.debug("Synchronise: fetching current event") @@ -297,8 +320,9 @@ class Branding(commands.Cog): if current_event is None: log.error("Failed to fetch event ~ cannot synchronise!") - else: - await self.enter_event(current_event) + return False, False + + return await self.enter_event(current_event) async def populate_cache_events(self, events: t.List[Event]) -> None: """ -- cgit v1.2.3 From 7900c630b81c0800c2bd7cb9e205cab8eeeac4ed Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 09:49:55 +0100 Subject: Branding: show success information in 'sync' response Now that the boolean flags are propagating from 'apply_asset', we can present them to the user. --- bot/exts/backend/branding/_cog.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index dd19832af..0664a5c6c 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -462,15 +462,26 @@ class Branding(commands.Cog): @commands.has_any_role(*MODERATION_ROLES) @branding_group.command(name="sync") async def branding_sync_cmd(self, ctx: commands.Context) -> None: - """Force branding synchronisation.""" + """ + Force branding synchronisation. + + Shows which assets have failed to synchronise, if any. 
+ """ async with ctx.typing(): - await self.synchronise() + banner_success, icon_success = await self.synchronise() - resp = make_embed( - "Synchronisation complete", - "If something doesn't look right, check log for errors.", - success=True, + failed_assets = ", ".join( + name + for name, status in [("banner", banner_success), ("icon", icon_success)] + if status is False ) + + if failed_assets: + resp = make_embed("Synchronisation unsuccessful", f"Failed to apply: {failed_assets}.", success=False) + resp.set_footer(text="Check log for details.") + else: + resp = make_embed("Synchronisation successful", "Assets have been applied.", success=True) + await ctx.send(embed=resp) # endregion -- cgit v1.2.3 From 49720c72df05703168756d8fcadd017e11dd6ece Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 09:52:25 +0100 Subject: Branding: make 'cog_unload' synchronous Discord.py doesn't await the return value. --- bot/exts/backend/branding/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 0664a5c6c..269aa6ad2 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -361,7 +361,7 @@ class Branding(commands.Cog): if should_begin: self.daemon_main.start() - async def cog_unload(self) -> None: + def cog_unload(self) -> None: """ Cancel the daemon in case of cog unload. -- cgit v1.2.3 From 71839ab8b15450201db1dfd8a321dd84b4dd140f Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 10:05:08 +0100 Subject: Branding: avoid sending #changelog notification on resync The notification is now sent conditionally depending on whether we're entering a new event. This prevents sending a repeating notification in the case of a manual resynchronisation. A practical example of when this may trigger is when a staff member temporarily applies custom assets & then uses the sync command to reapply the current event. 
--- bot/exts/backend/branding/_cog.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 269aa6ad2..df1c2d61c 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -270,26 +270,28 @@ class Branding(commands.Cog): async def enter_event(self, event: Event) -> t.Tuple[bool, bool]: """ - Enter `event` and update information cache. + Apply `event` assets and update information cache. - From the outside, entering a new event is as simple as applying its branding to the guild and dispatching - a notification to #changelog. - - However, internally we cache information to ensure that we: + We cache `event` information to ensure that we: * Remember which event we're currently in across restarts * Provide an on-demand information embed without re-querying the branding repository An event change should always be handled via this function, as it ensures that the cache is populated. + The #changelog notification is sent only if `event` differs from the currently cached event. + Returns a 2-tuple indicating whether the banner, and the icon, were applied successfully. 
""" - log.debug(f"Entering new event: {event.path}") + log.debug(f"Entering event: {event.path}") banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly await self.initiate_icon_rotation(event.icons) # Prepare a new rotation icon_success = await self.rotate_icons() # Apply an icon from the new rotation + # This will only be False in the case of a manual same-event re-synchronisation + event_changed = event.path != await self.cache_information.get("event_path") + # Cache event identity to avoid re-entry in case of restart await self.cache_information.set("event_path", event.path) @@ -298,7 +300,10 @@ class Branding(commands.Cog): await self.cache_information.set("event_description", event.meta.description) # Notify guild of new event ~ this reads the information that we cached above! - await self.send_info_embed(Channels.change_log) + if event_changed: + await self.send_info_embed(Channels.change_log) + else: + log.trace("Omitted #changelog notification as event has not changed (indicating manual re-sync)") return banner_success, icon_success -- cgit v1.2.3 From 8f61115cd9773997cb79b7df799318c83f6b7651 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 10:08:18 +0100 Subject: Branding: log event path alongside error Knowing which event failed would probably be quite useful. 
--- bot/exts/backend/branding/_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index b1368c59e..2f96396c0 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -182,7 +182,7 @@ class BrandingRepository: try: instance = await self.construct_event(event_directory) except Exception as exc: - log.warning(f"Could not construct event: {exc}") + log.warning(f"Could not construct event '{event_directory.path}': {exc}") else: instances.append(instance) -- cgit v1.2.3 From 5baf7fea618cc486e5d380f95f645d75d5e2048a Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 10:10:16 +0100 Subject: Branding: log correct amount of cached events --- bot/exts/backend/branding/_cog.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index df1c2d61c..1c8ea1f05 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -338,13 +338,15 @@ class Branding(commands.Cog): The cache does not store the fallback event, as it is not shown in the calendar. 
""" - log.debug(f"Populating events cache with {len(events)} events") + log.debug("Populating events cache") await self.cache_events.clear() no_fallback = [event for event in events if not event.meta.is_fallback] chronological_events = sorted(no_fallback, key=attrgetter("meta.start_date")) + log.trace(f"Writing {len(chronological_events)} events (fallback omitted)") + await self.cache_events.update({ extract_event_name(event): extract_event_duration(event) for event in chronological_events -- cgit v1.2.3 From 6a5d14be71e3453f7040602e64bfe9bd77e3555d Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 15:21:29 +0100 Subject: Branding: provide class documentation --- bot/exts/backend/branding/_cog.py | 24 +++++++++++++++++++++++- bot/exts/backend/branding/_repository.py | 21 ++++++++++++++++++++- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 1c8ea1f05..7519f029e 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -76,7 +76,29 @@ def extract_event_name(event: Event) -> str: class Branding(commands.Cog): - """Guild branding management.""" + """ + Guild branding management. + + This cog is responsible for automatic management of the guild's branding while sourcing assets directly from + the branding repository. + + We utilize multiple Redis caches to persist state. As a result, the cog should seamlessly transition across + restarts without having to query either the Discord or GitHub APIs, as it will always remember which + assets are currently applied. + + Additionally, the state of the icon rotation is persisted. As a result, the rotation doesn't reset unless + the current event or its icons change. + + The cog is designed to be autonomous. The daemon, unless disabled, will poll the branding repository at + midnight every day and respond to detected changes. 
Since we persist SHA hashes of tracked assets, + changes in an on-going event will trigger automatic resynchronisation. + + A #changelog notification is automatically sent when entering a new event. Changes in the branding of + an on-going event do not trigger a repeated notification. + + The command interface allows moderators+ to control the daemon or request an asset synchronisation, + while regular users can see information about the current event and the overall event schedule. + """ # RedisCache[ # "daemon_active": If True, daemon auto-starts; controlled via commands (bool) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 2f96396c0..a612b6752 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -69,7 +69,26 @@ class Event(t.NamedTuple): class BrandingRepository: - """Abstraction exposing the branding repository via convenient methods.""" + """ + Branding repository abstraction. + + This class represents the branding repository's main branch and exposes available events and assets as objects. + + The API is primarily formed by the `get_current_event` function. It performs the necessary amount of validation + to ensure that a misconfigured event isn't returned. Such events are simply ignored, and will be substituted + with the fallback event, if available. + + Warning logs will inform core developers if a misconfigured event is encountered. + + Colliding events cause no special behaviour - in such cases, the first found active event is returned. + We work with the assumption that the branding repository checks for such conflicts and prevents them + from reaching the main branch. + + This class keeps no internal state. All `get_current_event` calls will result in GitHub API requests. + The caller is therefore responsible for being responsible and caching information to prevent API abuse. 
+ + Requests are made using the HTTP session looked up on the bot instance. + """ def __init__(self, bot: Bot) -> None: self.bot = bot -- cgit v1.2.3 From ba01289d12f047bdd0465daaa95f12d4cf1eddb9 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 16:11:04 +0100 Subject: Branding: add embed length cut-offs for safety This should never do anything, but it's better to be safe. Values taken from Discord developer docs. --- bot/exts/backend/branding/_cog.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 7519f029e..75d912530 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -44,7 +44,7 @@ def make_embed(title: str, description: str, *, success: bool) -> discord.Embed: For both `title` and `description`, empty string are valid values ~ fields will be empty. """ colour = Colours.soft_green if success else Colours.soft_red - return discord.Embed(title=title, description=description, colour=colour) + return discord.Embed(title=title[:256], description=description[:2048], colour=colour) def extract_event_duration(event: Event) -> str: @@ -282,11 +282,14 @@ class Branding(commands.Cog): log.debug(f"Destination channel: #{channel.name}") - embed = discord.Embed( - description=await self.cache_information.get("event_description"), - colour=discord.Colour.blurple(), - ) - embed.set_footer(text=await self.cache_information.get("event_duration")) + description = await self.cache_information.get("event_description") + duration = await self.cache_information.get("event_duration") + + if None in (description, duration): + embed = make_embed("No event in cache", "Is the daemon enabled?", success=False) + else: + embed = discord.Embed(description=description[:2048], colour=discord.Colour.blurple()) + embed.set_footer(text=duration[:2048]) await channel.send(embed=embed) @@ -553,7 +556,7 @@ class Branding(commands.Cog): 
log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25!") for name, duration in first_25: - embed.add_field(name=name, value=duration) + embed.add_field(name=name[:256], value=duration[:1024]) embed.set_footer(text="Otherwise, the fallback season is used.") -- cgit v1.2.3 From 8aacf079cf44259b88562bda0b9e78d43ba3fd68 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 16:30:28 +0100 Subject: Branding: check for empty icon cache --- bot/exts/backend/branding/_cog.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 75d912530..43f0d742b 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -196,6 +196,10 @@ class Branding(commands.Cog): state = await self.cache_icons.to_dict() log.trace(f"Total icons in rotation: {len(state)}") + if not state: # This would only happen if rotation not initiated, but we can handle gracefully + log.warning("Attempted icon rotation with an empty icon cache!") + return False + if len(state) == 1 and 1 in state.values(): log.debug("Aborting icon rotation: only 1 icon is available and has already been applied") return False -- cgit v1.2.3 From 081169a91bde21700195a51ca1f6fec7dcda76ba Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sun, 14 Mar 2021 21:54:26 +0100 Subject: Branding: suppress 'ValueError' on empty-dict update --- bot/exts/backend/branding/_cog.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 43f0d742b..025a609b5 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -1,4 +1,5 @@ import asyncio +import contextlib import logging import random import typing as t @@ -376,10 +377,11 @@ class Branding(commands.Cog): log.trace(f"Writing {len(chronological_events)} events (fallback omitted)") - await self.cache_events.update({ - 
extract_event_name(event): extract_event_duration(event) - for event in chronological_events - }) + with contextlib.suppress(ValueError): # Cache raises when updated with an empty dict + await self.cache_events.update({ + extract_event_name(event): extract_event_duration(event) + for event in chronological_events + }) # endregion # region: Daemon -- cgit v1.2.3 From 290a082207faa94dea0f468ef0cab793e1e2cae9 Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Sun, 14 Mar 2021 21:13:36 +0000 Subject: feat: add new discord.py tags --- bot/resources/tags/customhelp.md | 3 +++ bot/resources/tags/intents.md | 19 +++++++++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 bot/resources/tags/customhelp.md create mode 100644 bot/resources/tags/intents.md diff --git a/bot/resources/tags/customhelp.md b/bot/resources/tags/customhelp.md new file mode 100644 index 000000000..b787fe673 --- /dev/null +++ b/bot/resources/tags/customhelp.md @@ -0,0 +1,3 @@ +**Custom help commands in discord.py** + +To learn more about how to create custom help commands in discord.py by subclassing the help command, please see [this tutorial](https://gist.github.com/InterStella0/b78488fb28cadf279dfd3164b9f0cf96#embed-minimalhelpcommand) by Stella#2000 \ No newline at end of file diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md new file mode 100644 index 000000000..642e65764 --- /dev/null +++ b/bot/resources/tags/intents.md @@ -0,0 +1,19 @@ +**Using intents in discord.py** + +Intents are a feature of Discord that tells the gateway exactly which events to send your bot. By default discord.py has all intents enabled, except for the `Members` and `Presences` intents, which are needed for events such as `on_member` and to get members' statuses. + +To enable one of these intents you need to first to to the [Discord developer portal](https://discord.com/developers/applications), then to the bot page of your bot's application. 
Scroll down to the `Privileged Gateway Intents` section, and enable the intents that you need. + +Next, in your bot you need to set the intents you want to connect with in the bot's constructor using the `intents` keyword argument, like this: + +```py +from discord import Intents +from discord.ext import commands + +intents = Intents.default() +intents.members = True + +bot = commands.Bot(command_prefix="!", intents=intents) +``` + +For more info about using intents, see the [discord.py docs on intents.](https://discordpy.readthedocs.io/en/latest/intents.html) \ No newline at end of file -- cgit v1.2.3 From 39b4da6a242a96ac298119d60f89bf2af69a952f Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Sun, 14 Mar 2021 21:16:42 +0000 Subject: fix: add newline file endings --- bot/resources/tags/customhelp.md | 2 +- bot/resources/tags/intents.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/resources/tags/customhelp.md b/bot/resources/tags/customhelp.md index b787fe673..6f0b17642 100644 --- a/bot/resources/tags/customhelp.md +++ b/bot/resources/tags/customhelp.md @@ -1,3 +1,3 @@ **Custom help commands in discord.py** -To learn more about how to create custom help commands in discord.py by subclassing the help command, please see [this tutorial](https://gist.github.com/InterStella0/b78488fb28cadf279dfd3164b9f0cf96#embed-minimalhelpcommand) by Stella#2000 \ No newline at end of file +To learn more about how to create custom help commands in discord.py by subclassing the help command, please see [this tutorial](https://gist.github.com/InterStella0/b78488fb28cadf279dfd3164b9f0cf96#embed-minimalhelpcommand) by Stella#2000 diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md index 642e65764..9171b2314 100644 --- a/bot/resources/tags/intents.md +++ b/bot/resources/tags/intents.md @@ -16,4 +16,4 @@ intents.members = True bot = commands.Bot(command_prefix="!", intents=intents) ``` -For more info about using intents, see the [discord.py 
docs on intents.](https://discordpy.readthedocs.io/en/latest/intents.html) \ No newline at end of file +For more info about using intents, see the [discord.py docs on intents.](https://discordpy.readthedocs.io/en/latest/intents.html) -- cgit v1.2.3 From b8a74372c6f37c2eda28272195a96668d324844d Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Sun, 14 Mar 2021 21:44:13 +0000 Subject: fix: minor spelling correction --- bot/resources/tags/intents.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md index 9171b2314..0e94520a8 100644 --- a/bot/resources/tags/intents.md +++ b/bot/resources/tags/intents.md @@ -2,7 +2,7 @@ Intents are a feature of Discord that tells the gateway exactly which events to send your bot. By default discord.py has all intents enabled, except for the `Members` and `Presences` intents, which are needed for events such as `on_member` and to get members' statuses. -To enable one of these intents you need to first to to the [Discord developer portal](https://discord.com/developers/applications), then to the bot page of your bot's application. Scroll down to the `Privileged Gateway Intents` section, and enable the intents that you need. +To enable one of these intents you need to first go to the [Discord developer portal](https://discord.com/developers/applications), then to the bot page of your bot's application. Scroll down to the `Privileged Gateway Intents` section, and enable the intents that you need. 
Next, in your bot you need to set the intents you want to connect with in the bot's constructor using the `intents` keyword argument, like this: -- cgit v1.2.3 From 4fc4d1c0d0303ec7c207165bd812aeb1387e58ac Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Sun, 14 Mar 2021 21:51:38 +0000 Subject: fix: more minor spelling/grammar corrections --- bot/resources/tags/intents.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md index 0e94520a8..6a282bc17 100644 --- a/bot/resources/tags/intents.md +++ b/bot/resources/tags/intents.md @@ -1,8 +1,8 @@ **Using intents in discord.py** -Intents are a feature of Discord that tells the gateway exactly which events to send your bot. By default discord.py has all intents enabled, except for the `Members` and `Presences` intents, which are needed for events such as `on_member` and to get members' statuses. +Intents are a feature of Discord that tells the gateway exactly which events to send your bot. By default, discord.py has all intents enabled, except for the `Members` and `Presences` intents, which are needed for events such as `on_member` and to get members' statuses. -To enable one of these intents you need to first go to the [Discord developer portal](https://discord.com/developers/applications), then to the bot page of your bot's application. Scroll down to the `Privileged Gateway Intents` section, and enable the intents that you need. +To enable one of these intents, you need to first go to the [Discord developer portal](https://discord.com/developers/applications), then to the bot page of your bot's application. Scroll down to the `Privileged Gateway Intents` section, then enable the intents that you need. 
Next, in your bot you need to set the intents you want to connect with in the bot's constructor using the `intents` keyword argument, like this: -- cgit v1.2.3 From e475181b5c51d6363c74e0e36f51f174bfa6ba2c Mon Sep 17 00:00:00 2001 From: kwzrd Date: Mon, 15 Mar 2021 18:40:35 +0100 Subject: Branding: decode 'meta.md' using UTF-8 --- bot/exts/backend/branding/_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index a612b6752..e14ff4226 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -134,7 +134,7 @@ class BrandingRepository: The caller is responsible for handling errors caused by misconfiguration. """ - attrs, description = frontmatter.parse(raw_file) # Library automatically decodes using UTF-8 + attrs, description = frontmatter.parse(raw_file, encoding="UTF-8") if not description: raise BrandingMisconfiguration("No description found in 'meta.md'!") -- cgit v1.2.3 From 10ce75f0d0af36e2ebf87c95d9d0b2095b541d2d Mon Sep 17 00:00:00 2001 From: kwzrd Date: Mon, 15 Mar 2021 22:30:44 +0100 Subject: Pipenv: bump 'python-frontmatter' pin & re-lock Fresh stable release, just in time! 
--- Pipfile | 2 +- Pipfile.lock | 52 ++++++++++++++++++++++++++++++++++------------------ 2 files changed, 35 insertions(+), 19 deletions(-) diff --git a/Pipfile b/Pipfile index e222a2108..56ece7611 100644 --- a/Pipfile +++ b/Pipfile @@ -21,7 +21,7 @@ lxml = "~=4.4" markdownify = "==0.5.3" more_itertools = "~=8.2" python-dateutil = "~=2.8" -python-frontmatter = "~=0.5.0" +python-frontmatter = "~=1.0.0" pyyaml = "~=5.1" requests = "~=2.22" sentry-sdk = "~=0.19" diff --git a/Pipfile.lock b/Pipfile.lock index 01a78af9b..589af71b7 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e8b1d8e8a3b258f482c25fe396aaa3255c749fdeae26770fccd7ce1a35f41180" + "sha256": "bf99bc953819e8c890d5482834bf243b3c3bc4f5f637fcff4a94b0a046e7593e" }, "pipfile-spec": 6, "requires": { @@ -621,11 +621,11 @@ }, "python-frontmatter": { "hashes": [ - "sha256:a7dcdfdaf498d488dce98bfa9452f8b70f803a923760ceab1ebd99291d98d28a", - "sha256:a9c2e90fc38e9f0c68d8b82299040f331ca3b8525ac7fa5f6beffef52b26c426" + "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08", + "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd" ], "index": "pypi", - "version": "==0.5.0" + "version": "==1.0.0" }, "python-json-logger": { "hashes": [ @@ -655,15 +655,23 @@ "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", 
"sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", + "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", + "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", + "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" ], "index": "pypi", "version": "==5.4.1" @@ -796,11 +804,11 @@ }, "urllib3": { "hashes": [ - "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", - "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.3" + "version": "==1.26.4" }, "yarl": { "hashes": [ @@ -1043,11 +1051,11 @@ }, "identify": { "hashes": [ - "sha256:2179e7359471ab55729f201b3fdf7dc2778e221f868410fedcb0987b791ba552", - "sha256:2a5fdf2f5319cc357eda2550bea713a404392495961022cf2462624ce62f0f46" + "sha256:46d1816c6a4fc2d1e8758f293a5dcc1ae6404ab344179d7c1e73637bf283beb1", + "sha256:ed4a05fb80e3cbd12e83c959f9ff7f729ba6b66ab8d6178850fd5cb4c1cf6c5d" ], "markers": "python_full_version >= '3.6.1'", - "version": "==2.1.0" + "version": "==2.1.3" }, "idna": { "hashes": [ @@ -1081,11 +1089,11 
@@ }, "pre-commit": { "hashes": [ - "sha256:16212d1fde2bed88159287da88ff03796863854b04dc9f838a55979325a3d20e", - "sha256:399baf78f13f4de82a29b649afd74bef2c4e28eb4f021661fc7f29246e8c7a3a" + "sha256:94c82f1bf5899d56edb1d926732f4e75a7df29a0c8c092559c77420c9d62428b", + "sha256:de55c5c72ce80d79106e48beb1b54104d16495ce7f95b0c7b13d4784193a00af" ], "index": "pypi", - "version": "==2.10.1" + "version": "==2.11.1" }, "pycodestyle": { "hashes": [ @@ -1125,15 +1133,23 @@ "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", + "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", + "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", + 
"sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" ], "index": "pypi", "version": "==5.4.1" @@ -1171,11 +1187,11 @@ }, "urllib3": { "hashes": [ - "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", - "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.3" + "version": "==1.26.4" }, "virtualenv": { "hashes": [ -- cgit v1.2.3 From e7302f0e50dfe158d3f4771d3e6d2181f5ac0351 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 15 Mar 2021 15:07:02 -0700 Subject: Code block: remove null bytes before parsing AST `ast.parse` raises a ValueError complaining that source code strings cannot contain null bytes. It seems like they may accidentally get pasted into Discord by users sometimes. --- bot/exts/info/codeblock/_parsing.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bot/exts/info/codeblock/_parsing.py b/bot/exts/info/codeblock/_parsing.py index e35fbca22..73fd11b94 100644 --- a/bot/exts/info/codeblock/_parsing.py +++ b/bot/exts/info/codeblock/_parsing.py @@ -103,6 +103,9 @@ def _is_python_code(content: str) -> bool: """Return True if `content` is valid Python consisting of more than just expressions.""" log.trace("Checking if content is Python code.") try: + # Remove null bytes because they cause ast.parse to raise a ValueError. + content = content.replace("\x00", "") + # Attempt to parse the message into an AST node. # Invalid Python code will raise a SyntaxError. 
tree = ast.parse(content) -- cgit v1.2.3 From 69ddce47076ef611cd250f6291d3dd0530b05790 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 15 Mar 2021 15:27:45 -0700 Subject: Defcon: fix naming conflict between threshold cmd and attribute --- bot/exts/moderation/defcon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index bd16289b9..bab95405c 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -157,9 +157,9 @@ class Defcon(Cog): await ctx.send(embed=embed) - @defcon_group.command(aliases=('t', 'd')) + @defcon_group.command(name="threshold", aliases=('t', 'd')) @has_any_role(*MODERATION_ROLES) - async def threshold( + async def threshold_command( self, ctx: Context, threshold: Union[DurationDelta, int], expiry: Optional[Expiry] = None ) -> None: """ -- cgit v1.2.3 From 089e4aaa6ac067b40d70b8cbbb95f9d26845d71f Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 15 Mar 2021 15:32:31 -0700 Subject: Info: account for defcon threshold being None Fixes BOT-XK --- bot/exts/info/information.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 92ddf0fbd..c54ca96bf 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -64,7 +64,8 @@ class Information(Cog): defcon_info = "" if cog := self.bot.get_cog("Defcon"): - defcon_info = f"Defcon threshold: {humanize_delta(cog.threshold)}\n" + threshold = humanize_delta(cog.threshold) if cog.threshold else "-" + defcon_info = f"Defcon threshold: {threshold}\n" verification = f"Verification level: {ctx.guild.verification_level.name}\n" -- cgit v1.2.3 From 3beebb973f3cceb5281d1901535185276c9f4714 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 16 Mar 2021 02:07:28 +0100 Subject: Update the symbol_name when fetching the DocItem from get_symbol_item Moving the block handling 
the fetching into a separate method meant that symbol_name was no longer updated inside the create_symbol_embed method, causing the whole message to be included in the embed title in case the space shortcut was used --- bot/exts/info/doc/_cog.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index bf49e0aee..5af95717b 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -7,7 +7,7 @@ import textwrap from collections import defaultdict from contextlib import suppress from types import SimpleNamespace -from typing import Dict, NamedTuple, Optional, Union +from typing import Dict, NamedTuple, Optional, Tuple, Union import aiohttp import discord @@ -219,11 +219,11 @@ class DocCog(commands.Cog): log.debug("Finished inventory refresh.") self.refresh_event.set() - def get_symbol_item(self, symbol_name: str) -> Optional[DocItem]: + def get_symbol_item(self, symbol_name: str) -> Tuple[str, Optional[DocItem]]: """ - Get the `DocItem` associated with `symbol_name` from the `doc_symbols` dict. + Get the `DocItem` and the symbol name used to fetch it from the `doc_symbols` dict. - If the doc item is not found directly from the name and the name contains a space, + If the doc item is not found directly from the passed in name and the name contains a space, the first word of the name will be attempted to be used to get the item. 
""" doc_item = self.doc_symbols.get(symbol_name) @@ -231,7 +231,7 @@ class DocCog(commands.Cog): symbol_name = symbol_name.split(" ", maxsplit=1)[0] doc_item = self.doc_symbols.get(symbol_name) - return doc_item + return symbol_name, doc_item async def get_symbol_markdown(self, doc_item: DocItem) -> str: """ @@ -273,7 +273,7 @@ class DocCog(commands.Cog): await self.refresh_event.wait() # Ensure a refresh can't run in case of a context switch until the with block is exited with self.symbol_get_event: - doc_item = self.get_symbol_item(symbol_name) + symbol_name, doc_item = self.get_symbol_item(symbol_name) if doc_item is None: log.debug("Symbol does not exist.") return None -- cgit v1.2.3 From ba91d5a530aa9958b7549cc03fecfb95112d52ca Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Tue, 16 Mar 2021 02:35:20 +0100 Subject: Replace shorten with custom algo to find good cutoff points shorten collapses the whitespace, causing issues with codeblocks --- bot/exts/info/doc/_parsing.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index b06aebd45..b3402f655 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -188,8 +188,21 @@ def _get_truncated_description( # Determine the actual truncation index. possible_truncation_indices = [cut for cut in markdown_element_ends if cut < truncate_index] if not possible_truncation_indices: - # In case there is no Markdown element ending before the truncation index, use shorten as a fallback. - truncated_result = textwrap.shorten(result, truncate_index, placeholder="") + # In case there is no Markdown element ending before the truncation index, try to find a good cutoff point. + force_truncated = result[:truncate_index] + # If there is an incomplete codeblock, cut it out. 
+ if force_truncated.count("```") % 2: + force_truncated = force_truncated[:force_truncated.rfind("```")] + # Search for substrings to truncate at, with decreasing desirability. + for string_ in ("\n\n", "\n", ". ", ", ", ",", " "): + cutoff = force_truncated.rfind(string_) + + if cutoff != -1: + truncated_result = force_truncated[:cutoff] + break + else: + truncated_result = force_truncated + else: # Truncate at the last Markdown element that comes before the truncation index. markdown_truncate_index = possible_truncation_indices[-1] -- cgit v1.2.3 From 6b9c95458f5f89496741f187d187c2b93561c839 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 16 Mar 2021 17:21:28 +0000 Subject: Split out help channel idle time constants This allows us to configure the idle time allowed for claiments seperate from tohers. --- bot/constants.py | 3 ++- config-default.yml | 8 ++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 394d59a73..b4d702e1d 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -592,7 +592,8 @@ class HelpChannels(metaclass=YAMLGetter): enable: bool claim_minutes: int cmd_whitelist: List[int] - idle_minutes: int + idle_minutes_claimant: int + idle_minutes_others: int deleted_idle_minutes: int max_available: int max_total_channels: int diff --git a/config-default.yml b/config-default.yml index 49d7f84ac..9388ecb99 100644 --- a/config-default.yml +++ b/config-default.yml @@ -468,8 +468,12 @@ help_channels: cmd_whitelist: - *HELPERS_ROLE - # Allowed duration of inactivity before making a channel dormant - idle_minutes: 30 + # Allowed duration of inactivity by claimant before making a channel dormant + idle_minutes_claimant: 30 + + # Allowed duration of inactivity by others before making a channel dormant + # `idle_minutes_claimant` must also be met, before a channel is closed + idle_minutes_others: 30 # Allowed duration of inactivity when channel is empty (due to deleted messages) # before message making a 
channel dormant -- cgit v1.2.3 From ce8616988d2e499f8dc79b22e8287c5a0ed50c19 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 16 Mar 2021 20:06:25 +0000 Subject: Set a reasonable default for `idle_minutes_others`. --- config-default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config-default.yml b/config-default.yml index 9388ecb99..38144c90c 100644 --- a/config-default.yml +++ b/config-default.yml @@ -473,7 +473,7 @@ help_channels: # Allowed duration of inactivity by others before making a channel dormant # `idle_minutes_claimant` must also be met, before a channel is closed - idle_minutes_others: 30 + idle_minutes_others: 10 # Allowed duration of inactivity when channel is empty (due to deleted messages) # before message making a channel dormant -- cgit v1.2.3 From 243d4657bfeafe31bc3ba9666b35a88eeef74a92 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 16 Mar 2021 20:07:34 +0000 Subject: Update help availible footer As we have complicated this logic, we now don't specify exactly how long until the channel will close. --- bot/exts/help_channels/_message.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 36388f9bd..011b4cdbe 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -28,7 +28,7 @@ For more tips, check out our guide on **[asking good questions]({ASKING_GUIDE_UR AVAILABLE_TITLE = "Available help channel" -AVAILABLE_FOOTER = f"Closes after {constants.HelpChannels.idle_minutes} minutes of inactivity or when you send !close." +AVAILABLE_FOOTER = "Closes after being idle for some time, or when you send !close." 
DORMANT_MSG = f""" This help channel has been marked as **dormant**, and has been moved into the **Help: Dormant** \ -- cgit v1.2.3 From b3c66a6fb07ebc92c0b53d946cf10df6c1107303 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 16 Mar 2021 20:08:53 +0000 Subject: Extend close time logic to differentiate between the claimant and other users. --- bot/exts/help_channels/_caches.py | 8 ++++++ bot/exts/help_channels/_channel.py | 54 ++++++++++++++++++++++++++------------ bot/exts/help_channels/_cog.py | 42 ++++++++++++++++++++--------- 3 files changed, 75 insertions(+), 29 deletions(-) diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py index 4cea385b7..c790a37b1 100644 --- a/bot/exts/help_channels/_caches.py +++ b/bot/exts/help_channels/_caches.py @@ -8,6 +8,14 @@ claim_times = RedisCache(namespace="HelpChannels.claim_times") # RedisCache[discord.TextChannel.id, t.Union[discord.User.id, discord.Member.id]] claimants = RedisCache(namespace="HelpChannels.help_channel_claimants") +# Stores the timestamp of the last message in a help channel +# RedisCache[discord.TextChannel.id, UtcPosixTimestamp] +last_message = RedisCache(namespace="HelpChannels.last_message") + +# Stores the timestamp of the last message from the claimant of a help channel +# RedisCache[discord.TextChannel.id, UtcPosixTimestamp] +claimant_last_message = RedisCache(namespace="HelpChannels.claimant_last_message") + # This cache maps a help channel to original question message in same channel. 
# RedisCache[discord.TextChannel.id, discord.Message.id] question_messages = RedisCache(namespace="HelpChannels.question_messages") diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 224214b00..95b8cdc1f 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -25,23 +25,43 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco yield channel -async def get_idle_time(channel: discord.TextChannel) -> t.Optional[int]: - """ - Return the time elapsed, in seconds, since the last message sent in the `channel`. - - Return None if the channel has no messages. - """ - log.trace(f"Getting the idle time for #{channel} ({channel.id}).") - - msg = await _message.get_last_message(channel) - if not msg: - log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages.") - return None - - idle_time = (datetime.utcnow() - msg.created_at).seconds - - log.trace(f"#{channel} ({channel.id}) has been idle for {idle_time} seconds.") - return idle_time +async def get_closing_time(channel: discord.TextChannel) -> t.Optional[int]: + """Return the timestamp at which the given help `channel` should be closed.""" + log.trace(f"Getting the closing time for #{channel} ({channel.id}).") + + if await _message.is_empty(channel): + idle_minutes = constants.HelpChannels.deleted_idle_minutes + else: + idle_minutes = constants.HelpChannels.idle_minutes_others + + last_message = await _caches.last_message.get(channel.id) + claimant_last_message = await _caches.claimant_last_message.get(channel.id) + + if not (last_message or claimant_last_message): + # Using the old method if we can't get cached info. + msg = await _message.get_last_message(channel) + if not msg: + log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages.") + return datetime.min + + # We want to get the time at which a channel should be closed. 
+ closing_time = msg.created_at + closing_time += timedelta(minutes=idle_minutes) + + return closing_time + + # We want to get the time at which a channel should be closed. + last_message = datetime.fromtimestamp(last_message) + claimant = datetime.fromtimestamp(claimant_last_message) + + last_message += timedelta(minutes=idle_minutes) + claimant += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) + + # The further away closing time is what we should use. + closing_time = max(claimant, last_message) + log.trace(f"claimant: {claimant}, last_message: {last_message}") + log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time}.") + return closing_time async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 1c730dce9..db14ce0ef 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -43,7 +43,9 @@ class HelpChannels(commands.Cog): In Use Category * Contains all channels which are occupied by someone needing help - * Channel moves to dormant category after `constants.HelpChannels.idle_minutes` of being idle + * Channel moves to dormant category after `constants.HelpChannels.idle_minutes_other` minutes + since the last user message, or `constants.HelpChannels.idle_minutes_claimant` minutes + since the last claimant message. 
* Command can prematurely mark a channel as dormant * Channel claimant is allowed to use the command * Allowed roles for the command are configurable with `constants.HelpChannels.cmd_whitelist` @@ -293,16 +295,12 @@ class HelpChannels(commands.Cog): """ log.trace(f"Handling in-use channel #{channel} ({channel.id}).") - if not await _message.is_empty(channel): - idle_seconds = constants.HelpChannels.idle_minutes * 60 - else: - idle_seconds = constants.HelpChannels.deleted_idle_minutes * 60 - - time_elapsed = await _channel.get_idle_time(channel) + closing_time = await _channel.get_closing_time(channel) + # The time at which the channel should be closed, based on messages sent. + if closing_time < datetime.utcnow(): - if time_elapsed is None or time_elapsed >= idle_seconds: log.info( - f"#{channel} ({channel.id}) is idle longer than {idle_seconds} seconds " + f"#{channel} ({channel.id}) is idle past {closing_time} " f"and will be made dormant." ) @@ -312,7 +310,7 @@ class HelpChannels(commands.Cog): if has_task: self.scheduler.cancel(channel.id) - delay = idle_seconds - time_elapsed + delay = (closing_time - datetime.utcnow()).seconds log.info( f"#{channel} ({channel.id}) is still active; " f"scheduling it to be moved after {delay} seconds." @@ -410,7 +408,7 @@ class HelpChannels(commands.Cog): category_id=constants.Categories.help_in_use, ) - timeout = constants.HelpChannels.idle_minutes * 60 + timeout = constants.HelpChannels.idle_minutes_others * 60 log.trace(f"Scheduling #{channel} ({channel.id}) to become dormant in {timeout} sec.") self.scheduler.schedule_later(timeout, channel.id, self.move_idle_channel(channel)) @@ -418,7 +416,12 @@ class HelpChannels(commands.Cog): @commands.Cog.listener() async def on_message(self, message: discord.Message) -> None: - """Move an available channel to the In Use category and replace it with a dormant one.""" + """ + Move an available channel to the In Use category and replace it with a dormant one. 
+ + Also updates the `message_times` cache based on the current timestamp. If the message + author is the claimant of this channel, also update the claimant_last_message. + """ if message.author.bot: return # Ignore messages sent by bots. @@ -427,6 +430,21 @@ class HelpChannels(commands.Cog): if channel_utils.is_in_category(message.channel, constants.Categories.help_available): if not _channel.is_excluded_channel(message.channel): await self.claim_channel(message) + # Initialise the cache for this channel + await _caches.claimant_last_message.set( + message.channel.id, + message.created_at.timestamp() + ) + await _caches.last_message.set( + message.channel.id, + message.created_at.timestamp() + ) + elif channel_utils.is_in_category(message.channel, constants.Categories.help_in_use): + # Overwrite the claimant message time, if its from the claimant. + if message.author == await _caches.claimants.get(message.channel.id): + await _caches.claimant_last_message(message.channel.id, message.created_at.timestamp()) + + await _caches.last_message.set(message.channel.id, message.created_at.timestamp()) else: await _message.check_for_answer(message) -- cgit v1.2.3 From 9503a2edc7b9609da6f96a65c9742259f106bbb1 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 16 Mar 2021 20:33:35 +0000 Subject: Improve trace message. --- bot/exts/help_channels/_channel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 95b8cdc1f..8db6e7617 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -59,7 +59,7 @@ async def get_closing_time(channel: discord.TextChannel) -> t.Optional[int]: # The further away closing time is what we should use. 
closing_time = max(claimant, last_message)
-    log.trace(f"claimant: {claimant}, last_message: {last_message}")
+    log.trace(f"claimant closing time: {claimant}, last_message closing time: {last_message}")
     log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time}.")
     return closing_time
-- 
cgit v1.2.3


From 61e17c893d2bfa8969396b6212c2b3a0190bd636 Mon Sep 17 00:00:00 2001
From: Chris
Date: Wed, 17 Mar 2021 18:33:07 +0000
Subject: Improve available channel embed footer

---
 bot/exts/help_channels/_message.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py
index 011b4cdbe..ec2daab45 100644
--- a/bot/exts/help_channels/_message.py
+++ b/bot/exts/help_channels/_message.py
@@ -28,7 +28,7 @@ For more tips, check out our guide on **[asking good questions]({ASKING_GUIDE_UR
 
 AVAILABLE_TITLE = "Available help channel"
 
-AVAILABLE_FOOTER = "Closes after being idle for some time, or when you send !close."
+AVAILABLE_FOOTER = "Closes after a period of inactivity, or when you send !close."
DORMANT_MSG = f""" This help channel has been marked as **dormant**, and has been moved into the **Help: Dormant** \ -- cgit v1.2.3 From 12a73f2b29ac2b2f43b488109fc66216a739f056 Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 17 Mar 2021 18:45:51 +0000 Subject: Improve docs strings and variable names within the help channel system --- bot/exts/help_channels/_caches.py | 4 ++-- bot/exts/help_channels/_channel.py | 18 +++++++++--------- bot/exts/help_channels/_cog.py | 18 +++++++++--------- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py index c790a37b1..d4676df87 100644 --- a/bot/exts/help_channels/_caches.py +++ b/bot/exts/help_channels/_caches.py @@ -10,11 +10,11 @@ claimants = RedisCache(namespace="HelpChannels.help_channel_claimants") # Stores the timestamp of the last message in a help channel # RedisCache[discord.TextChannel.id, UtcPosixTimestamp] -last_message = RedisCache(namespace="HelpChannels.last_message") +last_message_times = RedisCache(namespace="HelpChannels.last_message_times") # Stores the timestamp of the last message from the claimant of a help channel # RedisCache[discord.TextChannel.id, UtcPosixTimestamp] -claimant_last_message = RedisCache(namespace="HelpChannels.claimant_last_message") +claimant_last_message_times = RedisCache(namespace="HelpChannels.claimant_last_message_times") # This cache maps a help channel to original question message in same channel. 
# RedisCache[discord.TextChannel.id, discord.Message.id] diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 8db6e7617..6c17a26e0 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -34,10 +34,10 @@ async def get_closing_time(channel: discord.TextChannel) -> t.Optional[int]: else: idle_minutes = constants.HelpChannels.idle_minutes_others - last_message = await _caches.last_message.get(channel.id) - claimant_last_message = await _caches.claimant_last_message.get(channel.id) + last_message_time = await _caches.last_message_times.get(channel.id) + claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - if not (last_message or claimant_last_message): + if not (last_message_time or claimant_last_message_time): # Using the old method if we can't get cached info. msg = await _message.get_last_message(channel) if not msg: @@ -51,15 +51,15 @@ async def get_closing_time(channel: discord.TextChannel) -> t.Optional[int]: return closing_time # We want to get the time at which a channel should be closed. - last_message = datetime.fromtimestamp(last_message) - claimant = datetime.fromtimestamp(claimant_last_message) + last_message_time = datetime.fromtimestamp(last_message_time) + claimant_last_message_time = datetime.fromtimestamp(claimant_last_message_time) - last_message += timedelta(minutes=idle_minutes) - claimant += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) + last_message_time += timedelta(minutes=idle_minutes) + claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) # The further away closing time is what we should use. 
- closing_time = max(claimant, last_message) - log.trace(f"claimant closing time: {claimant}, last_message closing time: {last_message}") + closing_time = max(claimant_last_message_time, last_message_time) + log.trace(f"claimant closing time: {claimant_last_message_time}, last_message closing time: {last_message_time}") log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time}.") return closing_time diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index db14ce0ef..bac9aa9dd 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -43,9 +43,9 @@ class HelpChannels(commands.Cog): In Use Category * Contains all channels which are occupied by someone needing help - * Channel moves to dormant category after `constants.HelpChannels.idle_minutes_other` minutes - since the last user message, or `constants.HelpChannels.idle_minutes_claimant` minutes - since the last claimant message. + * Channel moves to dormant category after + - `constants.HelpChannels.idle_minutes_other` minutes since the last user message, or + - `constants.HelpChannels.idle_minutes_claimant` minutes since the last claimant message. * Command can prematurely mark a channel as dormant * Channel claimant is allowed to use the command * Allowed roles for the command are configurable with `constants.HelpChannels.cmd_whitelist` @@ -419,8 +419,8 @@ class HelpChannels(commands.Cog): """ Move an available channel to the In Use category and replace it with a dormant one. - Also updates the `message_times` cache based on the current timestamp. If the message - author is the claimant of this channel, also update the claimant_last_message. + Also updates the `last_message_times` cache based on the current timestamp. If the message + author is the claimant of this channel, also update the `claimant_last_message_times` cache. """ if message.author.bot: return # Ignore messages sent by bots. 
@@ -431,20 +431,20 @@ class HelpChannels(commands.Cog): if not _channel.is_excluded_channel(message.channel): await self.claim_channel(message) # Initialise the cache for this channel - await _caches.claimant_last_message.set( + await _caches.claimant_last_message_times.set( message.channel.id, message.created_at.timestamp() ) - await _caches.last_message.set( + await _caches.last_message_times.set( message.channel.id, message.created_at.timestamp() ) elif channel_utils.is_in_category(message.channel, constants.Categories.help_in_use): # Overwrite the claimant message time, if its from the claimant. if message.author == await _caches.claimants.get(message.channel.id): - await _caches.claimant_last_message(message.channel.id, message.created_at.timestamp()) + await _caches.claimant_last_message_times(message.channel.id, message.created_at.timestamp()) - await _caches.last_message.set(message.channel.id, message.created_at.timestamp()) + await _caches.last_message_times.set(message.channel.id, message.created_at.timestamp()) else: await _message.check_for_answer(message) -- cgit v1.2.3 From 1d5625a2f47a1d4d050f9eb0eb7a18e7d6fe171b Mon Sep 17 00:00:00 2001 From: kwzrd Date: Wed, 17 Mar 2021 22:19:55 +0100 Subject: Branding: adjust daemon start-up behaviour The daemon will now perform a sync iteration immediately when started, and then every UTC midnight. Previously, it would only perform the initial iteration when started for the first time, which is odd. It is also believed that splitting the daemon's logic into three separate functions is beneficial: before, loop, and main. This commit makes log and doc adjustments where appropriate. 
--- bot/exts/backend/branding/_cog.py | 71 ++++++++++++++++++++------------------- 1 file changed, 36 insertions(+), 35 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 025a609b5..cbd61a751 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -397,7 +397,7 @@ class Branding(commands.Cog): should_begin: t.Optional[bool] = await self.cache_information.get("daemon_active") # None if never set! if should_begin: - self.daemon_main.start() + self.daemon_loop.start() def cog_unload(self) -> None: """ @@ -407,71 +407,72 @@ class Branding(commands.Cog): """ log.debug("Cog unload: cancelling daemon") - self.daemon_main.cancel() + self.daemon_loop.cancel() - @tasks.loop(hours=24) async def daemon_main(self) -> None: """ - Periodically synchronise guild & caches with branding repository. + Synchronise guild & caches with branding repository. - This function executes every 24 hours at midnight. We pull the currently active event from the branding - repository and check whether it matches the currently active event. If not, we apply the new event. + Pull the currently active event from the branding repository and check whether it matches the currently + active event in the cache. If not, apply the new event. However, it is also possible that an event's assets change as it's active. To account for such cases, - we check the banner & icons hashes against the currently cached values. If there is a mismatch, the + we check the banner & icons hashes against the currently cached values. If there is a mismatch, each specific asset is re-applied. - - As such, the guild should always remain synchronised with the branding repository. However, the #changelog - notification is only sent in the case of entering a new event ~ no change in an on-going event will trigger - a new notification to be sent. 
""" - log.debug("Daemon awakens: checking current event") + log.trace("Daemon main: checking current event") new_event, available_events = await self.repository.get_current_event() await self.populate_cache_events(available_events) if new_event is None: - log.warning("Failed to get current event from the branding repository, daemon will do nothing!") + log.warning("Daemon main: failed to get current event from branding repository, will do nothing") return if new_event.path != await self.cache_information.get("event_path"): - log.debug("New event detected!") + log.debug("Daemon main: new event detected!") await self.enter_event(new_event) return - log.debug("Event has not changed, checking for change in assets") + log.trace("Daemon main: event has not changed, checking for change in assets") if new_event.banner.sha != await self.cache_information.get("banner_hash"): - log.debug("Detected same-event banner change!") + log.debug("Daemon main: detected banner change!") await self.apply_banner(new_event.banner) if compound_hash(new_event.icons) != await self.cache_information.get("icons_hash"): - log.debug("Detected same-event icon change!") + log.debug("Daemon main: detected icon change!") await self.initiate_icon_rotation(new_event.icons) await self.rotate_icons() else: await self.maybe_rotate_icons() - @daemon_main.before_loop - async def daemon_before(self) -> None: + @tasks.loop(hours=24) + async def daemon_loop(self) -> None: """ - Wait until the next-up UTC midnight before letting `daemon_main` begin. + Call `daemon_main` every 24 hours. - This function allows the daemon to keep a consistent schedule across restarts. + The scheduler maintains an exact 24-hour frequency even if this coroutine takes time to complete. If the + coroutine is started at 00:01 and completes at 00:05, it will still be started at 00:01 the next day. 
+ """ + log.trace("Daemon loop: calling daemon main") + + await self.daemon_main() - We check for a special case in which the cog's cache is empty. This indicates that we have never entered - an event (on first start-up), or that there was a cache loss. In either case, the current event gets - applied immediately, to avoid leaving the cog in an empty state. + @daemon_loop.before_loop + async def daemon_before(self) -> None: """ - log.debug("Calculating time for daemon to sleep before first awakening") + Call `daemon_main` immediately, then block `daemon_loop` until the next-up UTC midnight. - current_event = await self.cache_information.get("event_path") + The first iteration will be invoked manually such that synchronisation happens immediately after daemon start. + We then calculate the time until the next-up midnight and sleep before letting `daemon_loop` begin. + """ + log.info("Daemon before: synchronising guild") - if current_event is None: # Maiden case ~ first start or cache loss - log.debug("Event cache is empty (indicating maiden case), invoking synchronisation") - await self.synchronise() + await self.daemon_main() + log.trace("Daemon before: calculating time to sleep before loop begins") now = datetime.utcnow() # The actual midnight moment is offset into the future in order to prevent issues with imprecise sleep @@ -479,8 +480,8 @@ class Branding(commands.Cog): midnight = datetime.combine(tomorrow, time(minute=1)) sleep_secs = (midnight - now).total_seconds() + log.trace(f"Daemon before: sleeping {sleep_secs} seconds before next-up midnight: {midnight}") - log.debug(f"Sleeping {sleep_secs} seconds before next-up midnight at {midnight}") await asyncio.sleep(sleep_secs) # endregion @@ -600,10 +601,10 @@ class Branding(commands.Cog): """Enable the branding daemon.""" await self.cache_information.set("daemon_active", True) - if self.daemon_main.is_running(): + if self.daemon_loop.is_running(): resp = make_embed("Daemon is already enabled!", "", 
success=False) else: - self.daemon_main.start() + self.daemon_loop.start() resp = make_embed("Daemon enabled!", "It will now automatically awaken on start-up.", success=True) await ctx.send(embed=resp) @@ -613,8 +614,8 @@ class Branding(commands.Cog): """Disable the branding daemon.""" await self.cache_information.set("daemon_active", False) - if self.daemon_main.is_running(): - self.daemon_main.cancel() + if self.daemon_loop.is_running(): + self.daemon_loop.cancel() resp = make_embed("Daemon disabled!", "It will not awaken on start-up.", success=True) else: resp = make_embed("Daemon is already disabled!", "", success=False) @@ -624,7 +625,7 @@ class Branding(commands.Cog): @branding_daemon_group.command(name="status") async def branding_daemon_status_cmd(self, ctx: commands.Context) -> None: """Check whether the daemon is currently enabled.""" - if self.daemon_main.is_running(): + if self.daemon_loop.is_running(): resp = make_embed("Daemon is enabled", "Use `branding daemon disable` to stop.", success=True) else: resp = make_embed("Daemon is disabled", "Use `branding daemon enable` to start.", success=False) -- cgit v1.2.3 From 9f65bfd0985331b5974011feca30b02f01548de5 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Wed, 17 Mar 2021 22:36:20 +0100 Subject: Branding: extend command aliases --- bot/exts/backend/branding/_cog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index cbd61a751..57b762d1f 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -493,7 +493,7 @@ class Branding(commands.Cog): if not ctx.invoked_subcommand: await ctx.send_help(ctx.command) - @branding_group.command(name="about") + @branding_group.command(name="about", aliases=("current", "event")) async def branding_about_cmd(self, ctx: commands.Context) -> None: """Show the current event description.""" await self.send_info_embed(ctx.channel.id) @@ -526,7 +526,7 @@ class 
Branding(commands.Cog):
     # endregion
     # region: Command interface (branding calendar)
 
-    @branding_group.group(name="calendar", aliases=("schedule",))
+    @branding_group.group(name="calendar", aliases=("schedule", "events"))
     async def branding_calendar_group(self, ctx: commands.Context) -> None:
         """
         Show the current event calendar.
-- 
cgit v1.2.3


From 1b3c23a7d89d64a899a67fbfd0e69e2fa3acd911 Mon Sep 17 00:00:00 2001
From: kwzrd
Date: Thu, 18 Mar 2021 01:11:59 +0100
Subject: Branding: update 'synchronise' docs

After previous changes, the docstring was no longer accurate.

See: 1d5625a2f47a1d4d050f9eb0eb7a18e7d6fe171b

---
 bot/exts/backend/branding/_cog.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py
index 57b762d1f..0723458c2 100644
--- a/bot/exts/backend/branding/_cog.py
+++ b/bot/exts/backend/branding/_cog.py
@@ -341,9 +341,9 @@ class Branding(commands.Cog):
         """
         Fetch the current event and delegate to `enter_event`.
 
-        This is a convenience wrapper to force synchronisation either via a command, or when the daemon starts
-        with an empty cache. It is generally only used in a recovery scenario. In the usual case, the daemon
-        already has an `Event` instance and can pass it to `enter_event` directly.
+        This is a convenience function to force synchronisation via a command. It should generally only be used
+        in a recovery scenario. In the usual case, the daemon already has an `Event` instance and can pass it
+        to `enter_event` directly.
 
         Returns a 2-tuple indicating whether the banner, and the icon, were applied successfully.
         """
-- 
cgit v1.2.3


From fce7fb99e810f92187f5f10d1e681b8367336c6d Mon Sep 17 00:00:00 2001
From: Chris
Date: Thu, 18 Mar 2021 20:08:54 +0000
Subject: Repurpose unanswered cache for storing non-claimant last message times

The unanswered cache was previously just a boolean of whether a non-claimant ever replied to a help channel.
With us now needing to know the time at which a non-claimant messaged in a given channel, we infer the answered status from this cache instead. --- bot/exts/help_channels/_caches.py | 16 ++++++---------- bot/exts/help_channels/_channel.py | 33 +++++++++++++++++++-------------- bot/exts/help_channels/_cog.py | 21 +++------------------ bot/exts/help_channels/_message.py | 31 +++++++++++++++++-------------- bot/exts/help_channels/_stats.py | 6 +++--- 5 files changed, 48 insertions(+), 59 deletions(-) diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py index d4676df87..834c5f4c2 100644 --- a/bot/exts/help_channels/_caches.py +++ b/bot/exts/help_channels/_caches.py @@ -8,20 +8,16 @@ claim_times = RedisCache(namespace="HelpChannels.claim_times") # RedisCache[discord.TextChannel.id, t.Union[discord.User.id, discord.Member.id]] claimants = RedisCache(namespace="HelpChannels.help_channel_claimants") -# Stores the timestamp of the last message in a help channel -# RedisCache[discord.TextChannel.id, UtcPosixTimestamp] -last_message_times = RedisCache(namespace="HelpChannels.last_message_times") - # Stores the timestamp of the last message from the claimant of a help channel # RedisCache[discord.TextChannel.id, UtcPosixTimestamp] claimant_last_message_times = RedisCache(namespace="HelpChannels.claimant_last_message_times") +# This cache maps a help channel to the timestamp of the last, non-claimant, +# message. This cache being empty for a given help channel indicates the +# question is unanswered. +# RedisCache[discord.TextChannel.id, UtcPosixTimestamp] +non_claimant_last_message_times = RedisCache(namespace="HelpChannels.non_claimant_last_message_times") + # This cache maps a help channel to original question message in same channel. 
# RedisCache[discord.TextChannel.id, discord.Message.id] question_messages = RedisCache(namespace="HelpChannels.question_messages") - -# This cache maps a help channel to whether it has had any -# activity other than the original claimant. True being no other -# activity and False being other activity. -# RedisCache[discord.TextChannel.id, bool] -unanswered = RedisCache(namespace="HelpChannels.unanswered") diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 6c17a26e0..3e3749041 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -25,8 +25,13 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco yield channel -async def get_closing_time(channel: discord.TextChannel) -> t.Optional[int]: - """Return the timestamp at which the given help `channel` should be closed.""" +async def get_closing_time(channel: discord.TextChannel) -> datetime: + """ + Return the timestamp at which the given help `channel` should be closed. + + If either cache is empty, use the last message in the channel to determine closign time. + If the last message connt be retreived, return datetime.min, I.E close right now. + """ log.trace(f"Getting the closing time for #{channel} ({channel.id}).") if await _message.is_empty(channel): @@ -34,32 +39,32 @@ async def get_closing_time(channel: discord.TextChannel) -> t.Optional[int]: else: idle_minutes = constants.HelpChannels.idle_minutes_others - last_message_time = await _caches.last_message_times.get(channel.id) + non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - if not (last_message_time or claimant_last_message_time): + if not (non_claimant_last_message_time or claimant_last_message_time): # Using the old method if we can't get cached info. 
msg = await _message.get_last_message(channel) if not msg: log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages.") return datetime.min - # We want to get the time at which a channel should be closed. - closing_time = msg.created_at - closing_time += timedelta(minutes=idle_minutes) + # The time at which a channel should be closed. + return msg.created_at + timedelta(minutes=idle_minutes) - return closing_time - - # We want to get the time at which a channel should be closed. - last_message_time = datetime.fromtimestamp(last_message_time) + # Get the later time at which a channel should be closed + non_claimant_last_message_time = datetime.fromtimestamp(non_claimant_last_message_time) claimant_last_message_time = datetime.fromtimestamp(claimant_last_message_time) - last_message_time += timedelta(minutes=idle_minutes) + non_claimant_last_message_time += timedelta(minutes=idle_minutes) claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) # The further away closing time is what we should use. - closing_time = max(claimant_last_message_time, last_message_time) - log.trace(f"claimant closing time: {claimant_last_message_time}, last_message closing time: {last_message_time}") + closing_time = max(claimant_last_message_time, non_claimant_last_message_time) + log.trace( + f"Claimant closing time: {claimant_last_message_time}, " + f"last_message closing time: {non_claimant_last_message_time}" + ) log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time}.") return closing_time diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index bac9aa9dd..8c97ef2d0 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -117,8 +117,7 @@ class HelpChannels(commands.Cog): # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. 
timestamp = datetime.now(timezone.utc).timestamp() await _caches.claim_times.set(message.channel.id, timestamp) - - await _caches.unanswered.set(message.channel.id, True) + await _caches.claimant_last_message_times.set(message.channel.id, timestamp) # Not awaited because it may indefinitely hold the lock while waiting for a channel. scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}") @@ -378,6 +377,7 @@ class HelpChannels(commands.Cog): async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, is_auto: bool) -> None: """Actual implementation of `unclaim_channel`. See that for full documentation.""" await _caches.claimants.delete(channel.id) + await _caches.non_claimant_last_message_times.delete(channel.id) # Ignore missing tasks because a channel may still be dormant after the cooldown expires. if claimant_id in self.scheduler: @@ -419,7 +419,7 @@ class HelpChannels(commands.Cog): """ Move an available channel to the In Use category and replace it with a dormant one. - Also updates the `last_message_times` cache based on the current timestamp. If the message + Update the `last_message_times` cache based on the current timestamp. If the message author is the claimant of this channel, also update the `claimant_last_message_times` cache. """ if message.author.bot: @@ -430,21 +430,6 @@ class HelpChannels(commands.Cog): if channel_utils.is_in_category(message.channel, constants.Categories.help_available): if not _channel.is_excluded_channel(message.channel): await self.claim_channel(message) - # Initialise the cache for this channel - await _caches.claimant_last_message_times.set( - message.channel.id, - message.created_at.timestamp() - ) - await _caches.last_message_times.set( - message.channel.id, - message.created_at.timestamp() - ) - elif channel_utils.is_in_category(message.channel, constants.Categories.help_in_use): - # Overwrite the claimant message time, if its from the claimant. 
- if message.author == await _caches.claimants.get(message.channel.id): - await _caches.claimant_last_message_times(message.channel.id, message.created_at.timestamp()) - - await _caches.last_message_times.set(message.channel.id, message.created_at.timestamp()) else: await _message.check_for_answer(message) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index ec2daab45..9ba019550 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -1,7 +1,7 @@ import logging import textwrap import typing as t -from datetime import datetime +from datetime import datetime, timezone import discord @@ -48,19 +48,22 @@ async def check_for_answer(message: discord.Message) -> None: # Confirm the channel is an in use help channel if is_in_category(channel, constants.Categories.help_in_use): - log.trace(f"Checking if #{channel} ({channel.id}) has been answered.") - - # Check if there is an entry in unanswered - if await _caches.unanswered.contains(channel.id): - claimant_id = await _caches.claimants.get(channel.id) - if not claimant_id: - # The mapping for this channel doesn't exist, we can't do anything. - return - - # Check the message did not come from the claimant - if claimant_id != message.author.id: - # Mark the channel as answered - await _caches.unanswered.set(channel.id, False) + log.trace(f"Checking if #{channel} ({channel.id}) has had a reply.") + # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. + timestamp = datetime.now(timezone.utc).timestamp() + + # Overwrite the claimant message time, if its from the claimant. + if message.author == await _caches.claimants.get(channel.id): + await _caches.claimant_last_message_times.set(channel.id, timestamp) + return + + claimant_id = await _caches.claimants.get(channel.id) + if not claimant_id: + # The mapping for this channel doesn't exist, we can't do anything. 
+ return + + # Cache the timestamp of the non-claimants message + await _caches.non_claimant_last_message_times.set(channel.id, timestamp) async def get_last_message(channel: discord.TextChannel) -> t.Optional[discord.Message]: diff --git a/bot/exts/help_channels/_stats.py b/bot/exts/help_channels/_stats.py index b8778e7d9..e212c495d 100644 --- a/bot/exts/help_channels/_stats.py +++ b/bot/exts/help_channels/_stats.py @@ -35,8 +35,8 @@ async def report_complete_session(channel_id: int, is_auto: bool) -> None: if in_use_time: bot.instance.stats.timing("help.in_use_time", in_use_time) - unanswered = await _caches.unanswered.get(channel_id) - if unanswered: + non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel_id) + if non_claimant_last_message_time is None: bot.instance.stats.incr("help.sessions.unanswered") - elif unanswered is not None: + else: bot.instance.stats.incr("help.sessions.answered") -- cgit v1.2.3 From 50030597819e1be2787ecc52db3d41c8155ede40 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 18 Mar 2021 20:12:25 +0000 Subject: Update function name to describe new behaviour. Also updates the doc string to reflect this new behaviour. 
--- bot/exts/help_channels/_cog.py | 2 +- bot/exts/help_channels/_message.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 8c97ef2d0..3f453edd3 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -431,7 +431,7 @@ class HelpChannels(commands.Cog): if not _channel.is_excluded_channel(message.channel): await self.claim_channel(message) else: - await _message.check_for_answer(message) + await _message.update_message_caches(message) @commands.Cog.listener() async def on_message_delete(self, msg: discord.Message) -> None: diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 9ba019550..c20af2946 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -42,8 +42,8 @@ through our guide for **[asking a good question]({ASKING_GUIDE_URL})**. """ -async def check_for_answer(message: discord.Message) -> None: - """Checks for whether new content in a help channel comes from non-claimants.""" +async def update_message_caches(message: discord.Message) -> None: + """Checks the source of new content in a help channel and updates the appropriate cache.""" channel = message.channel # Confirm the channel is an in use help channel -- cgit v1.2.3 From 9a3c946d5fd390c422cc83b57fdf29afb0e635b7 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 18 Mar 2021 20:13:59 +0000 Subject: Delete cached claimant message time. --- bot/exts/help_channels/_cog.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 3f453edd3..a372fa868 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -377,6 +377,7 @@ class HelpChannels(commands.Cog): async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, is_auto: bool) -> None: """Actual implementation of `unclaim_channel`. 
See that for full documentation.""" await _caches.claimants.delete(channel.id) + await _caches.claimant_last_message_times.delete(channel.id) await _caches.non_claimant_last_message_times.delete(channel.id) # Ignore missing tasks because a channel may still be dormant after the cooldown expires. -- cgit v1.2.3 From 6a954de33cbfa36be45dfb61b05141c0ced23256 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 18 Mar 2021 20:26:12 +0000 Subject: Move return behaviour comments to in-line rather than docstring. --- bot/exts/help_channels/_channel.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 3e3749041..13726d234 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -26,12 +26,7 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco async def get_closing_time(channel: discord.TextChannel) -> datetime: - """ - Return the timestamp at which the given help `channel` should be closed. - - If either cache is empty, use the last message in the channel to determine closign time. - If the last message connt be retreived, return datetime.min, I.E close right now. - """ + """Return the timestamp at which the given help `channel` should be closed.""" log.trace(f"Getting the closing time for #{channel} ({channel.id}).") if await _message.is_empty(channel): @@ -43,10 +38,11 @@ async def get_closing_time(channel: discord.TextChannel) -> datetime: claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) if not (non_claimant_last_message_time or claimant_last_message_time): - # Using the old method if we can't get cached info. + # One of the caches is empty, use the last message in the channel to determine closing time instead. 
msg = await _message.get_last_message(channel) if not msg: - log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages.") + # last message can't be retreived, return datetime.min so channel closes right now. + log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages, closing now.") return datetime.min # The time at which a channel should be closed. -- cgit v1.2.3 From e962aa5d10e990ccaba6640aeca1c8b84bbb7ce4 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 18 Mar 2021 21:17:54 +0000 Subject: If the channel is empty, determine closing time based on last message. --- bot/exts/help_channels/_channel.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 13726d234..454e41157 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -26,10 +26,10 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco async def get_closing_time(channel: discord.TextChannel) -> datetime: - """Return the timestamp at which the given help `channel` should be closed.""" + """Return the timestamp at which the given help channel, `channel`, should be closed.""" log.trace(f"Getting the closing time for #{channel} ({channel.id}).") - if await _message.is_empty(channel): + if is_empty := await _message.is_empty(channel): idle_minutes = constants.HelpChannels.deleted_idle_minutes else: idle_minutes = constants.HelpChannels.idle_minutes_others @@ -37,8 +37,9 @@ async def get_closing_time(channel: discord.TextChannel) -> datetime: non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - if not (non_claimant_last_message_time or claimant_last_message_time): - # One of the caches is empty, use the last message in the channel to determine closing time instead. 
+ if is_empty or not (non_claimant_last_message_time or claimant_last_message_time): + # Current help session has no messages, or one of the caches is empty. + # Use the last message in the channel to determine closing time instead. msg = await _message.get_last_message(channel) if not msg: # last message can't be retreived, return datetime.min so channel closes right now. -- cgit v1.2.3 From 3fe9a39a3d262f32bc5dc8d18de5075aad8b0e5c Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 18 Mar 2021 21:19:34 +0000 Subject: Determine closing time from last message if either cache is empty, rather than if both are empty --- bot/exts/help_channels/_channel.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 454e41157..c65b87986 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -26,7 +26,7 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco async def get_closing_time(channel: discord.TextChannel) -> datetime: - """Return the timestamp at which the given help channel, `channel`, should be closed.""" + """Return the timestamp at which the given help `channel` should be closed.""" log.trace(f"Getting the closing time for #{channel} ({channel.id}).") if is_empty := await _message.is_empty(channel): @@ -37,7 +37,7 @@ async def get_closing_time(channel: discord.TextChannel) -> datetime: non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - if is_empty or not (non_claimant_last_message_time or claimant_last_message_time): + if is_empty or not (non_claimant_last_message_time and claimant_last_message_time): # Current help session has no messages, or one of the caches is empty. # Use the last message in the channel to determine closing time instead. 
msg = await _message.get_last_message(channel) -- cgit v1.2.3 From 72eb71d073e19d34d5a31c6e7c7af26a3be5f746 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 18 Mar 2021 21:20:36 +0000 Subject: More descriptive comment for when an if block is entered --- bot/exts/help_channels/_channel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index c65b87986..ad683b218 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -38,7 +38,7 @@ async def get_closing_time(channel: discord.TextChannel) -> datetime: claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) if is_empty or not (non_claimant_last_message_time and claimant_last_message_time): - # Current help session has no messages, or one of the caches is empty. + # Current help session has no messages, or at least one of the caches is empty. # Use the last message in the channel to determine closing time instead. msg = await _message.get_last_message(channel) if not msg: -- cgit v1.2.3 From 7aa572752ff24541b203f85fca1b74a66d226782 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 16:50:34 +0200 Subject: Apply requested style and grammar changes --- bot/exts/recruitment/talentpool/_review.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 682a32918..b4e425187 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -271,7 +271,7 @@ class Reviewer: On success, returns the user ID. 
""" - log.trace(f"Updating nomination #{nomination_id} as review") + log.trace(f"Updating nomination #{nomination_id} as reviewed") try: nomination = await self.bot.api_client.get(f"{self._pool.api_endpoint}/{nomination_id}") except ResponseCodeError as e: @@ -299,7 +299,7 @@ class Reviewer: def cancel(self, user_id: int) -> None: """ - Cancels the review of the nominee with ID user_id. + Cancels the review of the nominee with ID `user_id`. It's important to note that this applies only until reschedule_reviews is called again. To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed. -- cgit v1.2.3 From 09d7f0775109224faa3a437bc65546d24ae3576f Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 17:04:13 +0200 Subject: Add additional logging to _review.py --- bot/exts/recruitment/talentpool/_review.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index b4e425187..920728544 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -127,11 +127,14 @@ class Reviewer: user_activity = await self.bot.api_client.get(f"bot/users/{member.id}/metricity_review_data") except ResponseCodeError as e: if e.status == 404: + log.trace(f"The user {member.id} seems to have no activity logged in Metricity.") messages = "no" channels = "" else: + log.trace(f"An unexpected error occured while fetching information of user {member.id}.") raise else: + log.trace(f"Activity found for {member.id}, formatting review.") messages = user_activity["total_messages"] # Making this part flexible to the amount of expected and returned channels. 
first_channel = user_activity["top_channel_activity"][0] @@ -164,6 +167,7 @@ class Reviewer: params={'user__id': str(member.id), 'ordering': '-inserted_at'} ) + log.trace(f"{len(infraction_list)} infractions found for {member.id}, formatting review.") if not infraction_list: return "They have no infractions." @@ -224,6 +228,7 @@ class Reviewer: } ) + log.trace(f"{len(history)} previous nominations found for {member.id}, formatting review.") if not history: return @@ -257,6 +262,7 @@ class Reviewer: Returns the resulting message objects. """ messages = textwrap.wrap(text, width=MAX_MESSAGE_SIZE, replace_whitespace=False) + log.trace(f"The provided string will be sent to the channel {channel.id} as {len(messages)} messages.") results = [] for message in messages: @@ -304,6 +310,7 @@ class Reviewer: It's important to note that this applies only until reschedule_reviews is called again. To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed. """ + log.trace(f"Canceling the review of user {user_id}.") self._review_scheduler.cancel(user_id) def cancel_all(self) -> None: @@ -313,4 +320,5 @@ class Reviewer: It's important to note that this applies only until reschedule_reviews is called again. To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed. 
""" + log.trace("Canceling all reviews.") self._review_scheduler.cancel_all() -- cgit v1.2.3 From 69c49a8ca9aaf552719e1045c7a4c99f73185d62 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 17:55:44 +0200 Subject: Use ctx.send instead of ctx.channel.send Co-authored-by: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> --- bot/exts/recruitment/talentpool/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index 7b21dcd53..f3e3539b6 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -306,7 +306,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): """Mark a nomination as reviewed and cancel the review task.""" if not await self.reviewer.mark_reviewed(ctx, nomination_id): return - await ctx.channel.send(f"✅ The nomination with ID `{nomination_id}` was marked as reviewed.") + await ctx.send(f"✅ The nomination with ID `{nomination_id}` was marked as reviewed.") @nomination_group.command(aliases=('review',)) @has_any_role(*MODERATION_ROLES) -- cgit v1.2.3 From 96003d7e388587a77d6f6424a1aa1c93d059be99 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 17:57:29 +0200 Subject: Properly await coroutine in post_review --- bot/exts/recruitment/talentpool/_review.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 920728544..5fb1a505f 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -76,7 +76,9 @@ class Reviewer: channel = guild.get_channel(Channels.mod_announcements) member = guild.get_member(user_id) if not member: - channel.send(f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server 😔") + await channel.send( + f"I tried to review 
the user with ID `{user_id}`, but they don't appear to be on the server 😔" + ) return if update_database: -- cgit v1.2.3 From 2d9c47180157e7b6667340abc241e0d65cdb9cc5 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 18:20:56 +0200 Subject: Replace mentions for ID's in watchlist lists Uncached mentions render as 'invalid' users on mobile, and with the list now showing the user's name we can now just show the ID without many problems. --- bot/exts/moderation/watchchannels/_watchchannel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index b121243ce..9f26c34f2 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -353,7 +353,7 @@ class WatchChannel(metaclass=CogABCMeta): list_data["info"] = {} for user_id, user_data in watched_iter: member = ctx.guild.get_member(user_id) - line = f"• <@{user_id}>" + line = f"• `{user_id}`" if member: line += f" ({member.name}#{member.discriminator})" inserted_at = user_data['inserted_at'] -- cgit v1.2.3 From 1127da5c9a50bd01155b993eb0bac3e540410df9 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 18:53:42 +0200 Subject: Default message in review when no nomination reason given --- bot/exts/recruitment/talentpool/_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 5fb1a505f..db710c278 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -87,7 +87,7 @@ class Reviewer: opening = f"<@&{Roles.moderators}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!" 
current_nominations = "\n\n".join( - f"**<@{entry['actor']}>:** {entry['reason']}" for entry in nomination['entries'] + f"**<@{entry['actor']}>:** {entry['reason'] or '*no reason given*'}" for entry in nomination['entries'] ) current_nominations = f"**Nominated by:**\n{current_nominations}" -- cgit v1.2.3 From 94fc1cc0d4c7a69433c74eb555621374ac71ee22 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 20:21:16 +0200 Subject: Mark as reviewed when nominee is off server This is necessary as otherwise the bot would try to review them every time it restarts --- bot/exts/recruitment/talentpool/_review.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index db710c278..49aee8970 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -75,15 +75,16 @@ class Reviewer: guild = self.bot.get_guild(Guild.id) channel = guild.get_channel(Channels.mod_announcements) member = guild.get_member(user_id) + + if update_database: + await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) + if not member: await channel.send( f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server 😔" ) return - if update_database: - await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) - opening = f"<@&{Roles.moderators}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!" current_nominations = "\n\n".join( -- cgit v1.2.3 From 3bf532b4ba499fc276c94f1cd6d3d859afbb925e Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 20:50:57 +0200 Subject: Don't reschedule reviews that are long overdue If it's been over a day overdue for a review, don't reschedule it. 
This is done in order to not fire reviews for all nominations which are over 30 days old when the auto-reviewing feature is merged. --- bot/exts/recruitment/talentpool/_review.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 49aee8970..ba1564602 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -61,7 +61,9 @@ class Reviewer: inserted_at = isoparse(user_data['inserted_at']).replace(tzinfo=None) review_at = inserted_at + timedelta(days=MAX_DAYS_IN_POOL) - self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) + # If it's over a day overdue, it's probably an old nomination and shouldn't be automatically reviewed. + if datetime.utcnow() - review_at < timedelta(days=1): + self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) async def post_review(self, user_id: int, update_database: bool) -> None: """Format a generic review of a user and post it to the mod announcements channel.""" -- cgit v1.2.3 From a7c85564d90a3dc556a9582e925b33adc303de8f Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 22:35:00 +0200 Subject: Review commands now use the user ID instead of nomination ID The user ID is much more accessible, and is usually what is used to obtain the nomination ID. 
--- bot/exts/recruitment/talentpool/_cog.py | 12 +++++------ bot/exts/recruitment/talentpool/_review.py | 34 ++++++++++++------------------ 2 files changed, 19 insertions(+), 27 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index f3e3539b6..b809cea17 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -302,17 +302,17 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @nomination_group.command(aliases=('mr',)) @has_any_role(*MODERATION_ROLES) - async def mark_reviewed(self, ctx: Context, nomination_id: int) -> None: - """Mark a nomination as reviewed and cancel the review task.""" - if not await self.reviewer.mark_reviewed(ctx, nomination_id): + async def mark_reviewed(self, ctx: Context, user_id: int) -> None: + """Mark a user's nomination as reviewed and cancel the review task.""" + if not await self.reviewer.mark_reviewed(ctx, user_id): return - await ctx.send(f"✅ The nomination with ID `{nomination_id}` was marked as reviewed.") + await ctx.send(f"✅ The user with ID `{user_id}` was marked as reviewed.") @nomination_group.command(aliases=('review',)) @has_any_role(*MODERATION_ROLES) - async def post_review(self, ctx: Context, nomination_id: int) -> None: + async def post_review(self, ctx: Context, user_id: int) -> None: """Post the automatic review for the user ahead of time.""" - if not (user_id := await self.reviewer.mark_reviewed(ctx, nomination_id)): + if not await self.reviewer.mark_reviewed(ctx, user_id): return await self.reviewer.post_review(user_id, update_database=False) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index ba1564602..c2c1312d9 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -276,37 +276,29 @@ class Reviewer: return results - async def mark_reviewed(self, ctx: Context, nomination_id: int) -> Optional[int]: + 
async def mark_reviewed(self, ctx: Context, user_id: int) -> bool: """ Mark an active nomination as reviewed, updating the database and canceling the review task. - On success, returns the user ID. + Returns True if the user was successfully marked as reviewed, False otherwise. """ - log.trace(f"Updating nomination #{nomination_id} as reviewed") - try: - nomination = await self.bot.api_client.get(f"{self._pool.api_endpoint}/{nomination_id}") - except ResponseCodeError as e: - if e.response.status == 404: - log.trace(f"Nomination API 404: Can't find nomination with id {nomination_id}") - await ctx.send(f"❌ Can't find a nomination with id `{nomination_id}`") - return - else: - raise + log.trace(f"Updating user {user_id} as reviewed") + await self._pool.fetch_user_cache() + if user_id not in self._pool.watched_users: + log.trace(f"Can't find a nominated user with id {user_id}") + await ctx.send(f"❌ Can't find a currently nominated user with id `{user_id}`") + return False + nomination = self._pool.watched_users[user_id] if nomination["reviewed"]: await ctx.send("❌ This nomination was already reviewed, but here's a cookie 🍪") - return - elif not nomination["active"]: - await ctx.send("❌ This nomination is inactive") - return + return False await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) - if nomination["user"] in self._review_scheduler: - self._review_scheduler.cancel(nomination["user"]) - - await self._pool.fetch_user_cache() + if user_id in self._review_scheduler: + self._review_scheduler.cancel(user_id) - return nomination["user"] + return True def cancel(self, user_id: int) -> None: """ -- cgit v1.2.3 From e69e918a4309c04c3786da9c0d81e81540ffe411 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Fri, 19 Mar 2021 23:35:10 +0200 Subject: Fix review formatting when there's only one infraction type --- bot/exts/recruitment/talentpool/_review.py | 13 ++++++++----- 1 file changed, 8 
insertions(+), 5 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index c2c1312d9..57e18af9a 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -180,11 +180,14 @@ class Reviewer: infr_stats = list(Counter(infr["type"] for infr in infraction_list).items()) # Format into a sentence. - infractions = ", ".join( - f"{count} {self._format_infr_name(infr_type, count)}" - for infr_type, count in infr_stats[:-1] - ) - if len(infr_stats) > 1: + if len(infr_stats) == 1: + infr_type, count = infr_stats[0] + infractions = f"{count} {self._format_infr_name(infr_type, count)}" + else: # We already made sure they have infractions. + infractions = ", ".join( + f"{count} {self._format_infr_name(infr_type, count)}" + for infr_type, count in infr_stats[:-1] + ) last_infr, last_count = infr_stats[-1] infractions += f", and {last_count} {self._format_infr_name(last_infr, last_count)}" -- cgit v1.2.3 From 457cb14deade3d023d01ee0342bf3c84911f6d0b Mon Sep 17 00:00:00 2001 From: laundmo Date: Fri, 19 Mar 2021 22:56:50 +0100 Subject: reset cache on bot start, stats for different close reasons --- bot/exts/help_channels/_channel.py | 28 +++++++++++++++------------- bot/exts/help_channels/_cog.py | 35 ++++++++++++++++++++++------------- bot/exts/help_channels/_stats.py | 4 ++-- 3 files changed, 39 insertions(+), 28 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index ad683b218..1e152eaa3 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -25,11 +25,13 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco yield channel -async def get_closing_time(channel: discord.TextChannel) -> datetime: - """Return the timestamp at which the given help `channel` should be closed.""" +async def get_closing_time(channel: discord.TextChannel) -> t.Tuple[datetime, str]: + 
"""Return the timestamp at which the given help `channel` should be closed along with the reason.""" log.trace(f"Getting the closing time for #{channel} ({channel.id}).") - if is_empty := await _message.is_empty(channel): + is_empty = await _message.is_empty(channel) + + if is_empty: idle_minutes = constants.HelpChannels.deleted_idle_minutes else: idle_minutes = constants.HelpChannels.idle_minutes_others @@ -40,14 +42,16 @@ async def get_closing_time(channel: discord.TextChannel) -> datetime: if is_empty or not (non_claimant_last_message_time and claimant_last_message_time): # Current help session has no messages, or at least one of the caches is empty. # Use the last message in the channel to determine closing time instead. + msg = await _message.get_last_message(channel) + if not msg: # last message can't be retreived, return datetime.min so channel closes right now. log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages, closing now.") - return datetime.min + return datetime.min, "deleted" # The time at which a channel should be closed. - return msg.created_at + timedelta(minutes=idle_minutes) + return msg.created_at + timedelta(minutes=idle_minutes), "latest_message" # Get the later time at which a channel should be closed non_claimant_last_message_time = datetime.fromtimestamp(non_claimant_last_message_time) @@ -57,14 +61,12 @@ async def get_closing_time(channel: discord.TextChannel) -> datetime: claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) # The further away closing time is what we should use. 
- closing_time = max(claimant_last_message_time, non_claimant_last_message_time) - log.trace( - f"Claimant closing time: {claimant_last_message_time}, " - f"last_message closing time: {non_claimant_last_message_time}" - ) - log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time}.") - return closing_time - + if claimant_last_message_time >= non_claimant_last_message_time: + log.trace(f"#{channel} ({channel.id}) should be closed at {claimant_last_message_time} due to claimant timeout.") + return claimant_last_message_time, "claimant_timeout" + else: + log.trace(f"#{channel} ({channel.id}) should be closed at {non_claimant_last_message_time} due to others timeout.") + return non_claimant_last_message_time, "others_timeout" async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: """Return the duration `channel_id` has been in use. Return None if it's not in use.""" diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index a372fa868..ef6a286d6 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -118,6 +118,9 @@ class HelpChannels(commands.Cog): timestamp = datetime.now(timezone.utc).timestamp() await _caches.claim_times.set(message.channel.id, timestamp) await _caches.claimant_last_message_times.set(message.channel.id, timestamp) + # non_claimant needs to be set too, to satisfy the condition in `_channel.get_closing_time` the first time. + # Otherwise it will fall back to the old method if no other messages are sent. + await _caches.non_claimant_last_message_times.set(message.channel.id, timestamp) # Not awaited because it may indefinitely hold the lock while waiting for a channel. scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}") @@ -188,7 +191,7 @@ class HelpChannels(commands.Cog): # Don't use a discord.py check because the check needs to fail silently. 
if await self.close_check(ctx): log.info(f"Close command invoked by {ctx.author} in #{ctx.channel}.") - await self.unclaim_channel(ctx.channel, is_auto=False) + await self.unclaim_channel(ctx.channel, closed_on="command") async def get_available_candidate(self) -> discord.TextChannel: """ @@ -234,7 +237,7 @@ class HelpChannels(commands.Cog): elif missing < 0: log.trace(f"Moving {abs(missing)} superfluous available channels over to the Dormant category.") for channel in channels[:abs(missing)]: - await self.unclaim_channel(channel) + await self.unclaim_channel(channel, closed_on="cleanup") async def init_categories(self) -> None: """Get the help category objects. Remove the cog if retrieval fails.""" @@ -272,6 +275,8 @@ class HelpChannels(commands.Cog): log.trace("Moving or rescheduling in-use channels.") for channel in _channel.get_category_channels(self.in_use_category): + # clear the cache here so moving doesn't rely on old cached messages. + await self._delete_message_time_caches(channel) await self.move_idle_channel(channel, has_task=False) # Prevent the command from being used until ready. @@ -294,16 +299,16 @@ class HelpChannels(commands.Cog): """ log.trace(f"Handling in-use channel #{channel} ({channel.id}).") - closing_time = await _channel.get_closing_time(channel) + closing_time, closed_on = await _channel.get_closing_time(channel) # The time at which the channel should be closed, based on messages sent. if closing_time < datetime.utcnow(): log.info( f"#{channel} ({channel.id}) is idle past {closing_time} " - f"and will be made dormant." + f"and will be made dormant. Reason: {closed_on}" ) - await self.unclaim_channel(channel) + await self.unclaim_channel(channel, closed_on=closed_on) else: # Cancel the existing task, if any. 
if has_task: @@ -353,7 +358,7 @@ class HelpChannels(commands.Cog): _stats.report_counts() @lock.lock_arg(f"{NAMESPACE}.unclaim", "channel") - async def unclaim_channel(self, channel: discord.TextChannel, *, is_auto: bool = True) -> None: + async def unclaim_channel(self, channel: discord.TextChannel, *, closed_on: str) -> None: """ Unclaim an in-use help `channel` to make it dormant. @@ -361,7 +366,7 @@ class HelpChannels(commands.Cog): Remove the cooldown role from the channel claimant if they have no other channels claimed. Cancel the scheduled cooldown role removal task. - Set `is_auto` to True if the channel was automatically closed or False if manually closed. + `closed_on` is the reason that the channel was closed for. Examples: "cleanup", "command", "claimant_timeout" """ claimant_id = await _caches.claimants.get(channel.id) _unclaim_channel = self._unclaim_channel @@ -372,13 +377,17 @@ class HelpChannels(commands.Cog): decorator = lock.lock_arg(f"{NAMESPACE}.unclaim", "claimant_id", wait=True) _unclaim_channel = decorator(_unclaim_channel) - return await _unclaim_channel(channel, claimant_id, is_auto) + return await _unclaim_channel(channel, claimant_id, closed_on) + + async def _delete_message_time_caches(self, channel: discord.TextChannel) -> None: + """Delete message time caches """ + await _caches.claimant_last_message_times.delete(channel.id) + await _caches.non_claimant_last_message_times.delete(channel.id) - async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, is_auto: bool) -> None: + async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, closed_on: str) -> None: """Actual implementation of `unclaim_channel`. 
See that for full documentation.""" await _caches.claimants.delete(channel.id) - await _caches.claimant_last_message_times.delete(channel.id) - await _caches.non_claimant_last_message_times.delete(channel.id) + await self._delete_message_time_caches(channel) # Ignore missing tasks because a channel may still be dormant after the cooldown expires. if claimant_id in self.scheduler: @@ -392,12 +401,12 @@ class HelpChannels(commands.Cog): await _cooldown.remove_cooldown_role(claimant) await _message.unpin(channel) - await _stats.report_complete_session(channel.id, is_auto) + await _stats.report_complete_session(channel.id, closed_on) await self.move_to_dormant(channel) # Cancel the task that makes the channel dormant only if called by the close command. # In other cases, the task is either already done or not-existent. - if not is_auto: + if not closed_on: self.scheduler.cancel(channel.id) async def move_to_in_use(self, channel: discord.TextChannel) -> None: diff --git a/bot/exts/help_channels/_stats.py b/bot/exts/help_channels/_stats.py index e212c495d..cc9a053c4 100644 --- a/bot/exts/help_channels/_stats.py +++ b/bot/exts/help_channels/_stats.py @@ -22,13 +22,13 @@ def report_counts() -> None: log.warning(f"Couldn't find category {name!r} to track channel count stats.") -async def report_complete_session(channel_id: int, is_auto: bool) -> None: +async def report_complete_session(channel_id: int, closed_on: str) -> None: """ Report stats for a completed help session channel `channel_id`. Set `is_auto` to True if the channel was automatically closed or False if manually closed. 
""" - caller = "auto" if is_auto else "command" + caller = f"auto.{closed_on}" if closed_on else "command" bot.instance.stats.incr(f"help.dormant_calls.{caller}") in_use_time = await _channel.get_in_use_time(channel_id) -- cgit v1.2.3 From 350f02fab382810824b464889a8e9d29fb8407ce Mon Sep 17 00:00:00 2001 From: wookie184 Date: Sat, 20 Mar 2021 15:40:42 +0000 Subject: Added nomination voting channel to config Also changed talentpool review cog to post there instead of mod-announcements --- bot/constants.py | 1 + bot/exts/recruitment/talentpool/_review.py | 4 ++-- config-default.yml | 1 + 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 394d59a73..467a4a2c4 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -438,6 +438,7 @@ class Channels(metaclass=YAMLGetter): mods: int mod_alerts: int mod_spam: int + nomination_voting: int organisation: int admin_announcements: int diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 57e18af9a..fb3461238 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -66,7 +66,7 @@ class Reviewer: self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) async def post_review(self, user_id: int, update_database: bool) -> None: - """Format a generic review of a user and post it to the mod announcements channel.""" + """Format a generic review of a user and post it to the nomination voting channel.""" log.trace(f"Posting the review of {user_id}") nomination = self._pool.watched_users[user_id] @@ -75,7 +75,7 @@ class Reviewer: return guild = self.bot.get_guild(Guild.id) - channel = guild.get_channel(Channels.mod_announcements) + channel = guild.get_channel(Channels.nomination_voting) member = guild.get_member(user_id) if update_database: diff --git a/config-default.yml b/config-default.yml index 49d7f84ac..502f0f861 100644 --- 
a/config-default.yml +++ b/config-default.yml @@ -199,6 +199,7 @@ guild: mod_meta: &MOD_META 775412552795947058 mod_spam: &MOD_SPAM 620607373828030464 mod_tools: &MOD_TOOLS 775413915391098921 + nomination_voting: 822853512709931008 organisation: &ORGANISATION 551789653284356126 staff_lounge: &STAFF_LOUNGE 464905259261755392 -- cgit v1.2.3 From cfdff9e9268b599dcc476800df64120b371604a9 Mon Sep 17 00:00:00 2001 From: laundmo Date: Sat, 20 Mar 2021 19:20:17 +0100 Subject: passing pre-commit --- bot/exts/help_channels/_channel.py | 11 +++++++++-- bot/exts/help_channels/_cog.py | 4 ++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 1e152eaa3..986d3f28b 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -62,12 +62,19 @@ async def get_closing_time(channel: discord.TextChannel) -> t.Tuple[datetime, st # The further away closing time is what we should use. if claimant_last_message_time >= non_claimant_last_message_time: - log.trace(f"#{channel} ({channel.id}) should be closed at {claimant_last_message_time} due to claimant timeout.") + log.trace( + f"#{channel} ({channel.id}) should be closed at " + f"{claimant_last_message_time} due to claimant timeout." + ) return claimant_last_message_time, "claimant_timeout" else: - log.trace(f"#{channel} ({channel.id}) should be closed at {non_claimant_last_message_time} due to others timeout.") + log.trace( + f"#{channel} ({channel.id}) should be closed at " + f"{non_claimant_last_message_time} due to others timeout." + ) return non_claimant_last_message_time, "others_timeout" + async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: """Return the duration `channel_id` has been in use. 
Return None if it's not in use.""" log.trace(f"Calculating in use time for channel {channel_id}.") diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index ef6a286d6..1e9332323 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -119,7 +119,7 @@ class HelpChannels(commands.Cog): await _caches.claim_times.set(message.channel.id, timestamp) await _caches.claimant_last_message_times.set(message.channel.id, timestamp) # non_claimant needs to be set too, to satisfy the condition in `_channel.get_closing_time` the first time. - # Otherwise it will fall back to the old method if no other messages are sent. + # Otherwise it will fall back to the old method if no other messages are sent. await _caches.non_claimant_last_message_times.set(message.channel.id, timestamp) # Not awaited because it may indefinitely hold the lock while waiting for a channel. @@ -378,7 +378,7 @@ class HelpChannels(commands.Cog): _unclaim_channel = decorator(_unclaim_channel) return await _unclaim_channel(channel, claimant_id, closed_on) - + async def _delete_message_time_caches(self, channel: discord.TextChannel) -> None: """Delete message time caches """ await _caches.claimant_last_message_times.delete(channel.id) -- cgit v1.2.3 From 8747a66b3133c5b942a3f10b7ede313e93120038 Mon Sep 17 00:00:00 2001 From: wookie184 Date: Sat, 20 Mar 2021 18:21:21 +0000 Subject: Added myself to CODEOWNERS --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7217cb443..634bb4bca 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -12,6 +12,7 @@ bot/exts/info/information.py @mbaruh bot/exts/filters/** @mbaruh bot/exts/fun/** @ks129 bot/exts/utils/** @ks129 +bot/exts/recruitment/** @wookie184 # Rules bot/rules/** @mbaruh -- cgit v1.2.3 From 1eaf20181ba5bf80b673e7a6e2f73d8ca1c1d2b0 Mon Sep 17 00:00:00 2001 From: laundmo Date: Sat, 20 Mar 2021 19:55:45 +0100 Subject: run precommit with all 
deps --- bot/exts/help_channels/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 1e9332323..af106e92f 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -380,7 +380,7 @@ class HelpChannels(commands.Cog): return await _unclaim_channel(channel, claimant_id, closed_on) async def _delete_message_time_caches(self, channel: discord.TextChannel) -> None: - """Delete message time caches """ + """Delete message time caches.""" await _caches.claimant_last_message_times.delete(channel.id) await _caches.non_claimant_last_message_times.delete(channel.id) -- cgit v1.2.3 From fe04600ec554f88a746a10e272e055a030f46eb8 Mon Sep 17 00:00:00 2001 From: wookie184 Date: Sun, 21 Mar 2021 11:08:34 +0000 Subject: Added moderators category to config This was not done previously as the defcon channel in the category was viewable by helpers, but now it is mods+ I also removed the now unnecessary constants for whitelisted channels --- config-default.yml | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/config-default.yml b/config-default.yml index 502f0f861..3da6fe629 100644 --- a/config-default.yml +++ b/config-default.yml @@ -139,6 +139,7 @@ guild: help_dormant: 691405908919451718 help_in_use: 696958401460043776 logs: &LOGS 468520609152892958 + moderators: &MODS_CATEGORY 749736277464842262 modmail: &MODMAIL 714494672835444826 voice: 356013253765234688 @@ -193,12 +194,9 @@ guild: helpers: &HELPERS 385474242440986624 incidents: 714214212200562749 incidents_archive: 720668923636351037 - mods: &MODS 305126844661760000 + mods: 305126844661760000 mod_alerts: 473092532147060736 - mod_appeals: &MOD_APPEALS 808790025688711198 - mod_meta: &MOD_META 775412552795947058 - mod_spam: &MOD_SPAM 620607373828030464 - mod_tools: &MOD_TOOLS 775413915391098921 + mod_spam: 620607373828030464 nomination_voting: 822853512709931008 organisation: &ORGANISATION 
551789653284356126 staff_lounge: &STAFF_LOUNGE 464905259261755392 @@ -226,17 +224,13 @@ guild: talent_pool: &TALENT_POOL 534321732593647616 moderation_categories: + - *MODS_CATEGORY - *MODMAIL - *LOGS moderation_channels: - *ADMINS - *ADMIN_SPAM - - *MOD_APPEALS - - *MOD_META - - *MOD_TOOLS - - *MODS - - *MOD_SPAM # Modlog cog ignores events which occur in these channels modlog_blacklist: -- cgit v1.2.3 From 5590a2fa66b3af7d0a4f8aa35f122247c8b7521d Mon Sep 17 00:00:00 2001 From: wookie184 Date: Sun, 21 Mar 2021 13:50:41 +0000 Subject: Limit nominate to nominations channel Added a forcenominate command to allow mods to override this if necessary --- bot/constants.py | 1 + bot/exts/recruitment/talentpool/_cog.py | 28 ++++++++++++++++++++++++++-- config-default.yml | 1 + 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 467a4a2c4..fbb73d489 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -438,6 +438,7 @@ class Channels(metaclass=YAMLGetter): mods: int mod_alerts: int mod_spam: int + nominations: int nomination_voting: int organisation: int diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index b809cea17..fbe79382d 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -113,15 +113,39 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): """ await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) + @nomination_group.command(name='forcewatch', aliases=('fw', 'forceadd', 'fa'), root_aliases=("forcenominate",)) + @has_any_role(*MODERATION_ROLES) + async def force_watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: + """ + Adds the given `user` to the talent pool, from any channel. + + A `reason` for adding the user to the talent pool is optional. 
+ """ + await self._watch_user(ctx, user, reason) + @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) @has_any_role(*STAFF_ROLES) async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: """ - Relay messages sent by the given `user` to the `#talent-pool` channel. + Adds the given `user` to the talent pool. A `reason` for adding the user to the talent pool is optional. - If given, it will be displayed in the header when relaying messages of this user to the channel. + This command can only be used in the `#nominations` channel. """ + if ctx.channel.id != Channels.nominations: + if any(role.id in MODERATION_ROLES for role in ctx.author.roles): + await ctx.send( + f":x: Nominations should be run in the <#{Channels.nominations}> channel. " + "Use `!tp forcewatch` to override this check." + ) + else: + await ctx.send(f":x: Nominations must be run in the <#{Channels.nominations}> channel") + return + + await self._watch_user(ctx, user, reason) + + async def _watch_user(self, ctx: Context, user: FetchedMember, reason: str) -> None: + """Adds the given user to the talent pool.""" if user.bot: await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. 
I only watch humans.") return diff --git a/config-default.yml b/config-default.yml index 3da6fe629..a25826b5e 100644 --- a/config-default.yml +++ b/config-default.yml @@ -197,6 +197,7 @@ guild: mods: 305126844661760000 mod_alerts: 473092532147060736 mod_spam: 620607373828030464 + nominations: 822920136150745168 nomination_voting: 822853512709931008 organisation: &ORGANISATION 551789653284356126 staff_lounge: &STAFF_LOUNGE 464905259261755392 -- cgit v1.2.3 From 9251e8d1466d378144d8209a8442338b0f4f84fd Mon Sep 17 00:00:00 2001 From: wookie184 Date: Mon, 22 Mar 2021 19:55:42 +0000 Subject: config and constants changes Added moderators category to constants.py, and removed mods and mod_spam from config as no longer used --- bot/constants.py | 3 +-- config-default.yml | 2 -- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index fbb73d489..3bd6f4b66 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -388,6 +388,7 @@ class Categories(metaclass=YAMLGetter): help_available: int help_dormant: int help_in_use: int + moderators: int modmail: int voice: int @@ -435,9 +436,7 @@ class Channels(metaclass=YAMLGetter): helpers: int incidents: int incidents_archive: int - mods: int mod_alerts: int - mod_spam: int nominations: int nomination_voting: int organisation: int diff --git a/config-default.yml b/config-default.yml index a25826b5e..ef13bab87 100644 --- a/config-default.yml +++ b/config-default.yml @@ -194,9 +194,7 @@ guild: helpers: &HELPERS 385474242440986624 incidents: 714214212200562749 incidents_archive: 720668923636351037 - mods: 305126844661760000 mod_alerts: 473092532147060736 - mod_spam: 620607373828030464 nominations: 822920136150745168 nomination_voting: 822853512709931008 organisation: &ORGANISATION 551789653284356126 -- cgit v1.2.3 From afe7aff6948c795dd13f78ec31b1662e0a87493e Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 24 Mar 2021 19:25:10 +0000 Subject: Use fallback behaviour during cog init --- 
bot/exts/help_channels/_channel.py | 10 +++++++--- bot/exts/help_channels/_cog.py | 3 +-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 986d3f28b..029f55217 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -25,7 +25,7 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco yield channel -async def get_closing_time(channel: discord.TextChannel) -> t.Tuple[datetime, str]: +async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[datetime, str]: """Return the timestamp at which the given help `channel` should be closed along with the reason.""" log.trace(f"Getting the closing time for #{channel} ({channel.id}).") @@ -39,8 +39,12 @@ async def get_closing_time(channel: discord.TextChannel) -> t.Tuple[datetime, st non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - if is_empty or not (non_claimant_last_message_time and claimant_last_message_time): - # Current help session has no messages, or at least one of the caches is empty. + if is_empty or not all( + init_done, + non_claimant_last_message_time, + claimant_last_message_time, + ): + # Current help channel has no messages, at least one of the caches is empty or the help system cog is starting. # Use the last message in the channel to determine closing time instead. msg = await _message.get_last_message(channel) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index af106e92f..78ef8e89f 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -298,8 +298,7 @@ class HelpChannels(commands.Cog): dormant will first be cancelled. 
""" log.trace(f"Handling in-use channel #{channel} ({channel.id}).") - - closing_time, closed_on = await _channel.get_closing_time(channel) + closing_time, closed_on = await _channel.get_closing_time(channel, self.init_task.done()) # The time at which the channel should be closed, based on messages sent. if closing_time < datetime.utcnow(): -- cgit v1.2.3 From 238142855b1a4fc0f88aed5c897b94fcf5a7f53a Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 24 Mar 2021 19:25:58 +0000 Subject: Use claiment idle time as default idle minutes. --- bot/exts/help_channels/_channel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 029f55217..9fbeeff17 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -34,7 +34,7 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T if is_empty: idle_minutes = constants.HelpChannels.deleted_idle_minutes else: - idle_minutes = constants.HelpChannels.idle_minutes_others + idle_minutes = constants.HelpChannels.idle_minutes_claimant non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) -- cgit v1.2.3 From 2ac3e1f9f24033c77145fa143350814e15542fd2 Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 24 Mar 2021 19:28:16 +0000 Subject: Add back seperation whitespace --- bot/exts/help_channels/_cog.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 78ef8e89f..06adff397 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -298,6 +298,7 @@ class HelpChannels(commands.Cog): dormant will first be cancelled. 
""" log.trace(f"Handling in-use channel #{channel} ({channel.id}).") + closing_time, closed_on = await _channel.get_closing_time(channel, self.init_task.done()) # The time at which the channel should be closed, based on messages sent. if closing_time < datetime.utcnow(): -- cgit v1.2.3 From 734573baa812dc697c4cbc40c39cd1899b2185ab Mon Sep 17 00:00:00 2001 From: kwzrd Date: Thu, 25 Mar 2021 11:25:51 +0100 Subject: Branding: improve 'compound_hash' docstring --- bot/exts/backend/branding/_cog.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 0723458c2..1f9602401 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -32,7 +32,11 @@ class AssetType(Enum): def compound_hash(objects: t.Iterable[RemoteObject]) -> str: - """Compound hashes are cached to check for change in any of the member `objects`.""" + """ + Join SHA attributes of `objects` into a single string. + + Compound hashes are cached to check for change in any of the member `objects`. 
+ """ return "-".join(item.sha for item in objects) -- cgit v1.2.3 From bb5054c1aa8abcbd91a524bb532d2677f2029d97 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 25 Mar 2021 15:03:53 +0100 Subject: swap single quotes to double quotes where they were unnecessary --- bot/exts/info/doc/_cog.py | 24 ++++++++++++------------ bot/exts/info/doc/_inventory_parser.py | 12 ++++++------ bot/exts/info/doc/_parsing.py | 4 ++-- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 5af95717b..a06bfcbaf 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -213,7 +213,7 @@ class DocCog(commands.Cog): coros = [ self.update_or_reschedule_inventory( package["package"], package["base_url"], package["inventory_url"] - ) for package in await self.bot.api_client.get('bot/documentation-links') + ) for package in await self.bot.api_client.get("bot/documentation-links") ] await asyncio.gather(*coros) log.debug("Finished inventory refresh.") @@ -283,8 +283,8 @@ class DocCog(commands.Cog): # Show all symbols with the same name that were renamed in the footer, # with a max of 100 chars. 
if symbol_name in self.renamed_symbols: - renamed_symbols = ', '.join(self.renamed_symbols[symbol_name]) - footer_text = textwrap.shorten("Moved: " + renamed_symbols, 200, placeholder=' ...') + renamed_symbols = ", ".join(self.renamed_symbols[symbol_name]) + footer_text = textwrap.shorten("Moved: " + renamed_symbols, 200, placeholder=" ...") else: footer_text = "" @@ -296,12 +296,12 @@ class DocCog(commands.Cog): embed.set_footer(text=footer_text) return embed - @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) + @commands.group(name="docs", aliases=("doc", "d"), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None: """Look up documentation for Python symbols.""" await self.get_command(ctx, symbol_name=symbol_name) - @docs_group.command(name='getdoc', aliases=('g',)) + @docs_group.command(name="getdoc", aliases=("g",)) async def get_command(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None: """ Return a documentation embed for a given symbol. 
@@ -344,7 +344,7 @@ class DocCog(commands.Cog): msg = await ctx.send(embed=doc_embed) await wait_for_deletion(msg, (ctx.author.id,)) - @docs_group.command(name='setdoc', aliases=('s',)) + @docs_group.command(name="setdoc", aliases=("s",)) @commands.has_any_role(*MODERATION_ROLES) @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) async def set_command( @@ -367,11 +367,11 @@ class DocCog(commands.Cog): """ inventory_url, inventory_dict = inventory body = { - 'package': package_name, - 'base_url': base_url, - 'inventory_url': inventory_url + "package": package_name, + "base_url": base_url, + "inventory_url": inventory_url } - await self.bot.api_client.post('bot/documentation-links', json=body) + await self.bot.api_client.post("bot/documentation-links", json=body) log.info( f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" @@ -381,7 +381,7 @@ class DocCog(commands.Cog): self.update_single(package_name, base_url, inventory_dict) await ctx.send(f"Added the package `{package_name}` to the database and updated the inventories.") - @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd')) + @docs_group.command(name="deletedoc", aliases=("removedoc", "rm", "d")) @commands.has_any_role(*MODERATION_ROLES) @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None: @@ -391,7 +391,7 @@ class DocCog(commands.Cog): Example: !docs deletedoc aiohttp """ - await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') + await self.bot.api_client.delete(f"bot/documentation-links/{package_name}") async with ctx.typing(): await self.refresh_inventories() diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py index 1615f15bd..80d5841a0 100644 --- a/bot/exts/info/doc/_inventory_parser.py +++ b/bot/exts/info/doc/_inventory_parser.py @@ -50,12 +50,12 @@ async def _load_v1(stream: aiohttp.StreamReader) -> 
InventoryDict: async for line in stream: name, type_, location = line.decode().rstrip().split(maxsplit=2) # version 1 did not add anchors to the location - if type_ == 'mod': - type_ = 'py:module' - location += '#module-' + name + if type_ == "mod": + type_ = "py:module" + location += "#module-" + name else: - type_ = 'py:' + type_ - location += '#' + name + type_ = "py:" + type_ + location += "#" + name invdata[type_].append((name, location)) return invdata @@ -66,7 +66,7 @@ async def _load_v2(stream: aiohttp.StreamReader) -> InventoryDict: async for line in ZlibStreamReader(stream): m = _V2_LINE_RE.match(line.rstrip()) name, type_, _prio, location, _dispname = m.groups() # ignore the parsed items we don't need - if location.endswith('$'): + if location.endswith("$"): location = location[:-1] + name invdata[type_].append((name, location)) diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index b3402f655..bf840b96f 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -224,7 +224,7 @@ def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag] max_length=750, max_lines=13 ) - description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description) + description = _WHITESPACE_AFTER_NEWLINES_RE.sub("", description) if signatures is not None: signature = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures)) return f"{signature}\n{description}" @@ -253,4 +253,4 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[s else: signature = get_signatures(symbol_heading) description = get_dd_description(symbol_heading) - return _create_markdown(signature, description, symbol_data.url).replace('¶', '').strip() + return _create_markdown(signature, description, symbol_data.url).replace("¶", "").strip() -- cgit v1.2.3 From 8a58d04c277265f2009e46d6fcc9a85f6ddd5896 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Thu, 25 Mar 2021 20:16:25 +0100 Subject: Branding: 
cache fresh event description in daemon Previously, the event description & duration strings were only stored on event entry. In the case that the description or duration change for an on-going event, the cached values wouldn't be updated. After this commit, the cache is refreshed daily by the daemon. --- bot/exts/backend/branding/_cog.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 1f9602401..7d4f80f13 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -329,9 +329,8 @@ class Branding(commands.Cog): # Cache event identity to avoid re-entry in case of restart await self.cache_information.set("event_path", event.path) - # The following values are only stored for the purpose of presenting them to the users - await self.cache_information.set("event_duration", extract_event_duration(event)) - await self.cache_information.set("event_description", event.meta.description) + # Cache information shown in the 'about' embed + await self.populate_cache_event_description(event) # Notify guild of new event ~ this reads the information that we cached above! if event_changed: @@ -387,6 +386,20 @@ class Branding(commands.Cog): for event in chronological_events }) + async def populate_cache_event_description(self, event: Event) -> None: + """ + Cache `event` description & duration. + + This should be called when entering a new event, and can be called periodically to ensure that the cache + holds fresh information in the case that the event remains the same, but its description changes. + + The duration is stored formatted for the frontend. It is not intended to be used programmatically. 
+ """ + log.trace("Caching event description & duration") + + await self.cache_information.set("event_description", event.meta.description) + await self.cache_information.set("event_duration", extract_event_duration(event)) + # endregion # region: Daemon @@ -439,6 +452,8 @@ class Branding(commands.Cog): await self.enter_event(new_event) return + await self.populate_cache_event_description(new_event) # Cache fresh frontend info in case of change + log.trace("Daemon main: event has not changed, checking for change in assets") if new_event.banner.sha != await self.cache_information.get("banner_hash"): -- cgit v1.2.3 From 65c009736507974ebce20a1f36b7e24a4ecc349b Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 19:45:32 +0000 Subject: Don't prepend command closes with auto --- bot/exts/help_channels/_stats.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/help_channels/_stats.py b/bot/exts/help_channels/_stats.py index cc9a053c4..123604945 100644 --- a/bot/exts/help_channels/_stats.py +++ b/bot/exts/help_channels/_stats.py @@ -28,8 +28,9 @@ async def report_complete_session(channel_id: int, closed_on: str) -> None: Set `is_auto` to True if the channel was automatically closed or False if manually closed. 
""" - caller = f"auto.{closed_on}" if closed_on else "command" - bot.instance.stats.incr(f"help.dormant_calls.{caller}") + if closed_on != "command": + closed_on = f"auto.{closed_on}" + bot.instance.stats.incr(f"help.dormant_calls.{closed_on}") in_use_time = await _channel.get_in_use_time(channel_id) if in_use_time: -- cgit v1.2.3 From 9f576bcf28aa228b6c7f7d57072bba3135047200 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 20:25:18 +0000 Subject: Change help channel logic to use timezone naive stamps --- bot/exts/help_channels/_channel.py | 4 ++-- bot/exts/help_channels/_cog.py | 10 ++++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 9fbeeff17..9f6bc00a4 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -85,8 +85,8 @@ async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: claimed_timestamp = await _caches.claim_times.get(channel_id) if claimed_timestamp: - claimed = datetime.utcfromtimestamp(claimed_timestamp) - return datetime.utcnow() - claimed + claimed = datetime.fromtimestamp(claimed_timestamp) + return datetime.now() - claimed def is_excluded_channel(channel: discord.abc.GuildChannel) -> bool: diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 06adff397..f60e1a0c2 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -2,7 +2,7 @@ import asyncio import logging import random import typing as t -from datetime import datetime, timezone +from datetime import datetime from operator import attrgetter import discord @@ -114,13 +114,11 @@ class HelpChannels(commands.Cog): self.bot.stats.incr("help.claimed") - # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. 
- timestamp = datetime.now(timezone.utc).timestamp() - await _caches.claim_times.set(message.channel.id, timestamp) - await _caches.claimant_last_message_times.set(message.channel.id, timestamp) + await _caches.claim_times.set(message.channel.id, message.created_at) + await _caches.claimant_last_message_times.set(message.channel.id, message.created_at) # non_claimant needs to be set too, to satisfy the condition in `_channel.get_closing_time` the first time. # Otherwise it will fall back to the old method if no other messages are sent. - await _caches.non_claimant_last_message_times.set(message.channel.id, timestamp) + await _caches.non_claimant_last_message_times.set(message.channel.id, message.created_at) # Not awaited because it may indefinitely hold the lock while waiting for a channel. scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}") -- cgit v1.2.3 From 258086ff6831fc0665aeda07d379cd613979dfbc Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 20:27:42 +0000 Subject: Remove unneeded cache clearing --- bot/exts/help_channels/_cog.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index f60e1a0c2..4031cf3c1 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -273,8 +273,6 @@ class HelpChannels(commands.Cog): log.trace("Moving or rescheduling in-use channels.") for channel in _channel.get_category_channels(self.in_use_category): - # clear the cache here so moving doesn't rely on old cached messages. - await self._delete_message_time_caches(channel) await self.move_idle_channel(channel, has_task=False) # Prevent the command from being used until ready. 
@@ -377,15 +375,9 @@ class HelpChannels(commands.Cog): return await _unclaim_channel(channel, claimant_id, closed_on) - async def _delete_message_time_caches(self, channel: discord.TextChannel) -> None: - """Delete message time caches.""" - await _caches.claimant_last_message_times.delete(channel.id) - await _caches.non_claimant_last_message_times.delete(channel.id) - async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, closed_on: str) -> None: """Actual implementation of `unclaim_channel`. See that for full documentation.""" await _caches.claimants.delete(channel.id) - await self._delete_message_time_caches(channel) # Ignore missing tasks because a channel may still be dormant after the cooldown expires. if claimant_id in self.scheduler: -- cgit v1.2.3 From b1b105ad71ad53c94a1610e4e076bd0c0a4e466d Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 20:28:29 +0000 Subject: Check for close on command explicitly. --- bot/exts/help_channels/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 4031cf3c1..7dde204d0 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -396,7 +396,7 @@ class HelpChannels(commands.Cog): # Cancel the task that makes the channel dormant only if called by the close command. # In other cases, the task is either already done or not-existent.
- if not closed_on: + if closed_on == "command": self.scheduler.cancel(channel.id) async def move_to_in_use(self, channel: discord.TextChannel) -> None: -- cgit v1.2.3 From b8eef953511a283da912abdd3b6c673788f3652d Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 20:29:46 +0000 Subject: Schedule channels just opened using claimant configured idle time --- bot/exts/help_channels/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 7dde204d0..bac17fb2d 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -408,7 +408,7 @@ class HelpChannels(commands.Cog): category_id=constants.Categories.help_in_use, ) - timeout = constants.HelpChannels.idle_minutes_others * 60 + timeout = constants.HelpChannels.idle_minutes_claimant * 60 log.trace(f"Scheduling #{channel} ({channel.id}) to become dormant in {timeout} sec.") self.scheduler.schedule_later(timeout, channel.id, self.move_idle_channel(channel)) -- cgit v1.2.3 From ef38220761fa7f7299b2bd853dcd1f6f7cab9646 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 20:30:37 +0000 Subject: update help channel on_message docstring to reflect unchanged responsibility --- bot/exts/help_channels/_cog.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index bac17fb2d..a8828348c 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -416,12 +416,7 @@ class HelpChannels(commands.Cog): @commands.Cog.listener() async def on_message(self, message: discord.Message) -> None: - """ - Move an available channel to the In Use category and replace it with a dormant one. - - Update the `last_message_times` cache based on the current timestamp. If the message - author is the claimant of this channel, also update the `claimant_last_message_times` cache. 
- """ + """Move an available channel to the In Use category and replace it with a dormant one.""" if message.author.bot: return # Ignore messages sent by bots. -- cgit v1.2.3 From 93b9d410ce03a356b7c5edea87b389faeee36168 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 20:32:07 +0000 Subject: Only fetch claimant id once --- bot/exts/help_channels/_message.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index c20af2946..074fd01da 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -52,12 +52,13 @@ async def update_message_caches(message: discord.Message) -> None: # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. timestamp = datetime.now(timezone.utc).timestamp() + claimant_id = await _caches.claimants.get(channel.id) + # Overwrite the claimant message time, if its from the claimant. - if message.author == await _caches.claimants.get(channel.id): + if message.author.id == claimant_id: await _caches.claimant_last_message_times.set(channel.id, timestamp) return - claimant_id = await _caches.claimants.get(channel.id) if not claimant_id: # The mapping for this channel doesn't exist, we can't do anything. 
return -- cgit v1.2.3 From 3114b48aa4f943726b6afbbc34bc2b02ebec0930 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 20:37:57 +0000 Subject: Change help channel caching logic to use timezone naive stamps --- bot/exts/help_channels/_message.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 074fd01da..9506f7edd 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -1,7 +1,7 @@ import logging import textwrap import typing as t -from datetime import datetime, timezone +from datetime import datetime import discord @@ -49,8 +49,8 @@ async def update_message_caches(message: discord.Message) -> None: # Confirm the channel is an in use help channel if is_in_category(channel, constants.Categories.help_in_use): log.trace(f"Checking if #{channel} ({channel.id}) has had a reply.") - # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. - timestamp = datetime.now(timezone.utc).timestamp() + # Use datetime naive time stamp to be consistant with timestamps from discord. 
+ timestamp = datetime.now().timestamp() claimant_id = await _caches.claimants.get(channel.id) -- cgit v1.2.3 From bae3d6a75337cf7df58505fd5759ff3bd8df723e Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 22:15:10 +0000 Subject: Create utc datetime objects from timestamps --- bot/exts/help_channels/_channel.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 9f6bc00a4..76cce1249 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -58,8 +58,8 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T return msg.created_at + timedelta(minutes=idle_minutes), "latest_message" # Get the later time at which a channel should be closed - non_claimant_last_message_time = datetime.fromtimestamp(non_claimant_last_message_time) - claimant_last_message_time = datetime.fromtimestamp(claimant_last_message_time) + non_claimant_last_message_time = datetime.utcfromtimestamp(non_claimant_last_message_time) + claimant_last_message_time = datetime.utcfromtimestamp(claimant_last_message_time) non_claimant_last_message_time += timedelta(minutes=idle_minutes) claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) @@ -85,8 +85,8 @@ async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: claimed_timestamp = await _caches.claim_times.get(channel_id) if claimed_timestamp: - claimed = datetime.fromtimestamp(claimed_timestamp) - return datetime.now() - claimed + claimed = datetime.utcfromtimestamp(claimed_timestamp) + return datetime.utcnow() - claimed def is_excluded_channel(channel: discord.abc.GuildChannel) -> bool: -- cgit v1.2.3 From f22865931b9cc40b406d8af488d3e65321626f57 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 22:26:30 +0000 Subject: Refactor help channel update message cache function for clearer flow --- bot/exts/help_channels/_message.py 
| 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 9506f7edd..d60b31dea 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -49,22 +49,21 @@ async def update_message_caches(message: discord.Message) -> None: # Confirm the channel is an in use help channel if is_in_category(channel, constants.Categories.help_in_use): log.trace(f"Checking if #{channel} ({channel.id}) has had a reply.") - # Use datetime naive time stamp to be consistant with timestamps from discord. - timestamp = datetime.now().timestamp() claimant_id = await _caches.claimants.get(channel.id) - # Overwrite the claimant message time, if its from the claimant. - if message.author.id == claimant_id: - await _caches.claimant_last_message_times.set(channel.id, timestamp) - return - if not claimant_id: # The mapping for this channel doesn't exist, we can't do anything. return - # Cache the timestamp of the non-claimants message - await _caches.non_claimant_last_message_times.set(channel.id, timestamp) + # Use datetime naive time stamp to be consistant with timestamps from discord. 
+ timestamp = message.created_at.timestamp() + + # Overwrite the appropriate last message cache depending on the author of the message + if message.author.id == claimant_id: + await _caches.claimant_last_message_times.set(channel.id, timestamp) + else: + await _caches.non_claimant_last_message_times.set(channel.id, timestamp) async def get_last_message(channel: discord.TextChannel) -> t.Optional[discord.Message]: -- cgit v1.2.3 From d7eb48aef4035494d078e489fd926be4eae48e64 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 22:39:17 +0000 Subject: Refactor if block within help channel system to be more readable --- bot/exts/help_channels/_channel.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 76cce1249..9497cb4fb 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -39,13 +39,14 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - if is_empty or not all( - init_done, - non_claimant_last_message_time, - claimant_last_message_time, + if ( + is_empty + or not init_done + or non_claimant_last_message_time is None + or claimant_last_message_time is None ): - # Current help channel has no messages, at least one of the caches is empty or the help system cog is starting. - # Use the last message in the channel to determine closing time instead. + # if the current help channel has no messages, the help system cog is starting or + # at least one of the caches is empty use the last message in the channel to determine closing time instead. 
msg = await _message.get_last_message(channel) -- cgit v1.2.3 From a3a5fc491a6fe47791f6a46ceda733f2b01442d7 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 23:24:32 +0000 Subject: Reset a channel's non-claimant cache on claim, to indicate that the session has yet to be answered. --- bot/exts/help_channels/_channel.py | 11 +++++++---- bot/exts/help_channels/_cog.py | 5 ++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 9497cb4fb..22966dbe0 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -36,17 +36,20 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T else: idle_minutes = constants.HelpChannels.idle_minutes_claimant - non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) + non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) + if non_claimant_last_message_time is None: + # A non-claimant hasn't messaged since session start, set to min timestamp so only claimant + # idle period is considered when getting the closing time. + non_claimant_last_message_time = datetime.min.timestamp() if ( is_empty or not init_done - or non_claimant_last_message_time is None or claimant_last_message_time is None ): - # if the current help channel has no messages, the help system cog is starting or - # at least one of the caches is empty use the last message in the channel to determine closing time instead. + # If the current help channel has no messages, the help system cog is starting or + # the claimant cache is empty, use the last message in the channel to determine closing time instead. 
msg = await _message.get_last_message(channel) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index a8828348c..d9b288280 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -116,9 +116,8 @@ class HelpChannels(commands.Cog): await _caches.claim_times.set(message.channel.id, message.created_at) await _caches.claimant_last_message_times.set(message.channel.id, message.created_at) - # non_claimant needs to be set too, to satisfy the condition in `_channel.get_closing_time` the first time. - # Otherwise it will fall back to the old method if no other messages are sent. - await _caches.non_claimant_last_message_times.set(message.channel.id, message.created_at) + # Reset thie non_claimant cache for this channel to indicate that this session has yet to be answered. + await _caches.non_claimant_last_message_times.delete(message.channel.id) # Not awaited because it may indefinitely hold the lock while waiting for a channel. scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}") -- cgit v1.2.3 From a95178989a9a5d2e2afd40256da8e672de8b2325 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 25 Mar 2021 23:32:20 +0000 Subject: Convert to timestamp before posting to redis --- bot/exts/help_channels/_cog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index d9b288280..46817218f 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -114,8 +114,8 @@ class HelpChannels(commands.Cog): self.bot.stats.incr("help.claimed") - await _caches.claim_times.set(message.channel.id, message.created_at) - await _caches.claimant_last_message_times.set(message.channel.id, message.created_at) + await _caches.claim_times.set(message.channel.id, message.created_at.timestamp()) + await _caches.claimant_last_message_times.set(message.channel.id, message.created_at.timestamp()) # Reset thie non_claimant 
cache for this channel to indicate that this session has yet to be answered. await _caches.non_claimant_last_message_times.delete(message.channel.id) -- cgit v1.2.3 From fea068ff529aa0e2b2f92d0f8e58f5954a49237d Mon Sep 17 00:00:00 2001 From: Chris Date: Fri, 26 Mar 2021 00:07:02 +0000 Subject: Fix error when converting non claimant message datetime.min cannot be converted to a timestamp as it's pre-epoch. Instead wait until we actually need it and then create the correct datetime object depending on the cache contents. --- bot/exts/help_channels/_channel.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 22966dbe0..5845e7087 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -37,11 +37,6 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T idle_minutes = constants.HelpChannels.idle_minutes_claimant claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) - if non_claimant_last_message_time is None: - # A non-claimant hasn't messaged since session start, set to min timestamp so only claimant - # idle period is considered when getting the closing time. - non_claimant_last_message_time = datetime.min.timestamp() if ( is_empty or not init_done @@ -61,10 +56,18 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T # The time at which a channel should be closed.
return msg.created_at + timedelta(minutes=idle_minutes), "latest_message" - # Get the later time at which a channel should be closed - non_claimant_last_message_time = datetime.utcfromtimestamp(non_claimant_last_message_time) + # Switch to datetime objects so we can use time deltas claimant_last_message_time = datetime.utcfromtimestamp(claimant_last_message_time) + non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) + if non_claimant_last_message_time: + non_claimant_last_message_time = datetime.utcfromtimestamp(non_claimant_last_message_time) + else: + # If it's falsey, then it indicates a non-claimant has yet to reply to this session. + # Set to min date time so it isn't considered when calculating the closing time. + non_claimant_last_message_time = datetime.min + + # Get the later time at which a channel should be closed non_claimant_last_message_time += timedelta(minutes=idle_minutes) claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) -- cgit v1.2.3 From 519398bac8cb04ab296e43cc707e466a8a501f12 Mon Sep 17 00:00:00 2001 From: Chris Date: Fri, 26 Mar 2021 00:09:00 +0000 Subject: Add 1 second due to POSIX timestamps being lower resolution than datetime objects. 
--- bot/exts/help_channels/_cog.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 46817218f..0e71661ac 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -2,7 +2,7 @@ import asyncio import logging import random import typing as t -from datetime import datetime +from datetime import datetime, timedelta from operator import attrgetter import discord @@ -295,8 +295,10 @@ class HelpChannels(commands.Cog): log.trace(f"Handling in-use channel #{channel} ({channel.id}).") closing_time, closed_on = await _channel.get_closing_time(channel, self.init_task.done()) - # The time at which the channel should be closed, based on messages sent. - if closing_time < datetime.utcnow(): + + # Closing time is in the past. + # Add 1 second due to POSIX timestamps being lower resolution than datetime objects. + if closing_time < (datetime.utcnow() + timedelta(seconds=1)): log.info( f"#{channel} ({channel.id}) is idle past {closing_time} " -- cgit v1.2.3 From bc25bfdf42cdaaba924a7ad6de1dc06a9b381285 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Fri, 26 Mar 2021 13:37:21 +0100 Subject: Ensure the base url ends with a slash A base url without a trailing slash won't join properly with the relative paths, raising an error may prevent some mistakes when a new inventory is added --- bot/exts/info/doc/_cog.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index a06bfcbaf..ff67b0e61 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -365,6 +365,8 @@ class DocCog(commands.Cog): https://docs.python.org/3/ \ https://docs.python.org/3/objects.inv """ + if not base_url.endswith("/"): + raise commands.BadArgument("The base url must end with a slash.") inventory_url, inventory_dict = inventory body = { "package": package_name, -- cgit v1.2.3 From
c14d9ea78a64b90ccf7815a71206c906c81af710 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Fri, 26 Mar 2021 14:44:46 +0100 Subject: Branding: raise on non-200 responses The fetch helpers will now raise when the request fails rather than logging a warning and returning a fallback value. This allows better error logging as the caller is able to log the propagated exception while adding its own context. Additionally, the caller in some cases no longer needs to check for the None return and raise its own exception. --- bot/exts/backend/branding/_cog.py | 8 ++++---- bot/exts/backend/branding/_repository.py | 35 +++++++++++++++----------------- 2 files changed, 20 insertions(+), 23 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 7d4f80f13..d6c5b159b 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -145,10 +145,10 @@ class Branding(commands.Cog): """ log.info(f"Applying {asset_type.value} asset to the guild") - file = await self.repository.fetch_file(download_url) - - if file is None: - log.error(f"Failed to download {asset_type.value} from branding repository!") + try: + file = await self.repository.fetch_file(download_url) + except Exception as fetch_exc: + log.error(f"Failed to fetch '{asset_type.value}' asset: {fetch_exc}") return False await self.bot.wait_until_guild_available() diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index e14ff4226..715361c5d 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -100,33 +100,30 @@ class BrandingRepository: The directory will be represented by a mapping from file or sub-directory names to their corresponding instances of `RemoteObject`. Passing a custom `types` value allows only getting files or directories. - If the request fails, returns an empty dictionary. 
+ An exception will be raised if the request fails, or if the response lacks the expected keys. """ full_url = f"{BRANDING_URL}/{path}" log.debug(f"Fetching directory from branding repository: {full_url}") async with self.bot.http_session.get(full_url, params=PARAMS, headers=HEADERS) as response: - if response.status == 200: - json_directory = await response.json() - else: - log.warning(f"Received non-200 response status: {response.status}") - return {} + if response.status != 200: + raise RuntimeError(f"Failed to fetch directory due to status: {response.status}") + json_directory = await response.json() return {file["name"]: RemoteObject(file) for file in json_directory if file["type"] in types} - async def fetch_file(self, download_url: str) -> t.Optional[bytes]: + async def fetch_file(self, download_url: str) -> bytes: """ - Fetch file from `download_url`. + Fetch file as bytes from `download_url`. - Returns the file as bytes unless the request fails, in which case None is given. + Raise an exception if the request does not succeed. 
""" log.debug(f"Fetching file from branding repository: {download_url}") async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response: - if response.status == 200: - return await response.read() - else: - log.warning(f"Received non-200 response status: {response.status}") + if response.status != 200: + raise RuntimeError(f"Failed to fetch file due to status: {response.status}") + return await response.read() async def parse_meta_file(self, raw_file: bytes) -> MetaFile: """ @@ -170,16 +167,11 @@ class BrandingRepository: server_icons = await self.fetch_directory(contents["server_icons"].path, types=("file",)) - if server_icons is None: - raise BrandingMisconfiguration("Failed to fetch server icons!") if len(server_icons) == 0: raise BrandingMisconfiguration("Found no server icons!") meta_bytes = await self.fetch_file(contents["meta.md"].download_url) - if meta_bytes is None: - raise BrandingMisconfiguration("Failed to fetch 'meta.md' file!") - meta_file = await self.parse_meta_file(meta_bytes) return Event(directory.path, meta_file, contents["banner.png"], list(server_icons.values())) @@ -193,7 +185,12 @@ class BrandingRepository: """ log.debug("Discovering events in branding repository") - event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files + try: + event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files + except Exception as fetch_exc: + log.error(f"Failed to fetch 'events' directory: {fetch_exc}") + return [] + instances: t.List[Event] = [] for event_directory in event_directories.values(): -- cgit v1.2.3 From c619a98e6ce16298e999d18667b86ee9f094b550 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Fri, 26 Mar 2021 15:03:56 +0100 Subject: Branding: raise custom error when constructing remote objects The default KeyError message from dict lookup is just the missing key. In order to give more context in the log message, we raise our own. 
--- bot/exts/backend/branding/_repository.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 715361c5d..91a95ae3a 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -43,6 +43,9 @@ class RemoteObject: def __init__(self, dictionary: t.Dict[str, t.Any]) -> None: """Initialize by grabbing annotated attributes from `dictionary`.""" + missing_keys = self.__annotations__.keys() - dictionary.keys() + if missing_keys: + raise KeyError(f"Fetched object lacks expected keys: {missing_keys}") for annotation in self.__annotations__: setattr(self, annotation, dictionary[annotation]) -- cgit v1.2.3 From fc6a160983cc9548f51e385033ab3755c9d121b1 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Fri, 26 Mar 2021 15:06:03 +0100 Subject: Branding: make 'meta.md' parser synchronous No reason for this to be async. --- bot/exts/backend/branding/_repository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 91a95ae3a..420cfb9ea 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -128,7 +128,7 @@ class BrandingRepository: raise RuntimeError(f"Failed to fetch file due to status: {response.status}") return await response.read() - async def parse_meta_file(self, raw_file: bytes) -> MetaFile: + def parse_meta_file(self, raw_file: bytes) -> MetaFile: """ Parse a 'meta.md' file from raw bytes. 
@@ -175,7 +175,7 @@ class BrandingRepository: meta_bytes = await self.fetch_file(contents["meta.md"].download_url) - meta_file = await self.parse_meta_file(meta_bytes) + meta_file = self.parse_meta_file(meta_bytes) return Event(directory.path, meta_file, contents["banner.png"], list(server_icons.values())) -- cgit v1.2.3 From bd64acac079c564d3fca64519463518f7056dfe2 Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Fri, 26 Mar 2021 17:49:15 +0000 Subject: fix: remove . from the hyperlink Co-authored-by: Joe Banks <20439493+jb3@users.noreply.github.com> --- bot/resources/tags/intents.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md index 6a282bc17..e08fd1c33 100644 --- a/bot/resources/tags/intents.md +++ b/bot/resources/tags/intents.md @@ -16,4 +16,4 @@ intents.members = True bot = commands.Bot(command_prefix="!", intents=intents) ``` -For more info about using intents, see the [discord.py docs on intents.](https://discordpy.readthedocs.io/en/latest/intents.html) +For more info about using intents, see the [discord.py docs on intents](https://discordpy.readthedocs.io/en/latest/intents.html). -- cgit v1.2.3 From 2cf2402ea51e3a61d319706a95bc4ab633d6b8fc Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Fri, 26 Mar 2021 17:52:07 +0000 Subject: feat: add link to discord dev portal intents section --- bot/resources/tags/intents.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md index e08fd1c33..464caf0ba 100644 --- a/bot/resources/tags/intents.md +++ b/bot/resources/tags/intents.md @@ -16,4 +16,4 @@ intents.members = True bot = commands.Bot(command_prefix="!", intents=intents) ``` -For more info about using intents, see the [discord.py docs on intents](https://discordpy.readthedocs.io/en/latest/intents.html). 
+For more info about using intents, see the [discord.py docs on intents](https://discordpy.readthedocs.io/en/latest/intents.html), and for general information about them, see the [Discord developer documentation on intents](https://discord.com/developers/docs/topics/gateway#gateway-intents). -- cgit v1.2.3 From a732e0a412e72233af98b4954c2fa001a06bd8c7 Mon Sep 17 00:00:00 2001 From: Chris Date: Fri, 26 Mar 2021 21:57:59 +0000 Subject: Use correct constant for each type of help session user --- bot/exts/help_channels/_channel.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 5845e7087..d46969d4f 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -32,9 +32,9 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T is_empty = await _message.is_empty(channel) if is_empty: - idle_minutes = constants.HelpChannels.deleted_idle_minutes + idle_minutes_claimant = constants.HelpChannels.deleted_idle_minutes else: - idle_minutes = constants.HelpChannels.idle_minutes_claimant + idle_minutes_claimant = constants.HelpChannels.idle_minutes_claimant claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) @@ -54,7 +54,7 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T return datetime.min, "deleted" # The time at which a channel should be closed. 
- return msg.created_at + timedelta(minutes=idle_minutes), "latest_message" + return msg.created_at + timedelta(minutes=idle_minutes_claimant), "latest_message" # Switch to datetime objects so we can use time deltas claimant_last_message_time = datetime.utcfromtimestamp(claimant_last_message_time) @@ -68,8 +68,8 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T non_claimant_last_message_time = datetime.min # Get the later time at which a channel should be closed - non_claimant_last_message_time += timedelta(minutes=idle_minutes) - claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_claimant) + non_claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_others) + claimant_last_message_time += timedelta(minutes=idle_minutes_claimant) # The further away closing time is what we should use. if claimant_last_message_time >= non_claimant_last_message_time: -- cgit v1.2.3 From cc3c47463d4317b072810deb622d54dddf9a132c Mon Sep 17 00:00:00 2001 From: Chris Date: Fri, 26 Mar 2021 15:19:25 -0700 Subject: Switch to datetime.fromtimestamp() to avoid over-compensation Previously we were using `utcfromtimestamp()` which would compensate the timestamp when converting to UTC even though the timestamp itself was in UTC: >>> datetime.utcnow() datetime.datetime(2021, 3, 26, 22, 8, 47, 441603) >>> a = datetime.utcnow().timestamp() 1616821624.207364 >>> a = datetime.utcfromtimestamp(a) datetime.datetime(2021, 3, 27, 5, 7, 4, 207364) By switching to `fromtimestamp()` this avoids that behaviour. 
--- bot/exts/help_channels/_channel.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index d46969d4f..b1960531d 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -57,11 +57,11 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T return msg.created_at + timedelta(minutes=idle_minutes_claimant), "latest_message" # Switch to datetime objects so we can use time deltas - claimant_last_message_time = datetime.utcfromtimestamp(claimant_last_message_time) + claimant_last_message_time = datetime.fromtimestamp(claimant_last_message_time) non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) if non_claimant_last_message_time: - non_claimant_last_message_time = datetime.utcfromtimestamp(non_claimant_last_message_time) + non_claimant_last_message_time = datetime.fromtimestamp(non_claimant_last_message_time) else: # If it's falsey, then it indicates a non-claimant has yet to reply to this session. # Set to min date time so it isn't considered when calculating the closing time. 
@@ -92,7 +92,7 @@ async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: claimed_timestamp = await _caches.claim_times.get(channel_id) if claimed_timestamp: - claimed = datetime.utcfromtimestamp(claimed_timestamp) + claimed = datetime.fromtimestamp(claimed_timestamp) return datetime.utcnow() - claimed -- cgit v1.2.3 From 288cdac3e826a4fb67d8ba2c1fe98ea52f9fe658 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 27 Mar 2021 11:59:22 +0100 Subject: Branding: ensure daemon logs exceptions --- bot/exts/backend/branding/_cog.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index d6c5b159b..57347b60e 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -477,19 +477,22 @@ class Branding(commands.Cog): """ log.trace("Daemon loop: calling daemon main") - await self.daemon_main() + try: + await self.daemon_main() + except Exception: + log.exception("Daemon loop: failed with an unhandled exception!") @daemon_loop.before_loop async def daemon_before(self) -> None: """ - Call `daemon_main` immediately, then block `daemon_loop` until the next-up UTC midnight. + Call `daemon_loop` immediately, then block the loop until the next-up UTC midnight. - The first iteration will be invoked manually such that synchronisation happens immediately after daemon start. + The first iteration is invoked directly such that synchronisation happens immediately after daemon start. We then calculate the time until the next-up midnight and sleep before letting `daemon_loop` begin. 
""" - log.info("Daemon before: synchronising guild") + log.trace("Daemon before: performing start-up iteration") - await self.daemon_main() + await self.daemon_loop() log.trace("Daemon before: calculating time to sleep before loop begins") now = datetime.utcnow() -- cgit v1.2.3 From 3a0ddbb3709bd36f2e15bb77c5de7f157ed64425 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 27 Mar 2021 13:24:31 +0100 Subject: Branding: revise documentation --- bot/exts/backend/branding/_cog.py | 116 ++++++++++++++----------------- bot/exts/backend/branding/_repository.py | 61 ++++++++-------- 2 files changed, 80 insertions(+), 97 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 57347b60e..c7d326da3 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -74,8 +74,8 @@ def extract_event_name(event: Event) -> str: An event with a path of 'events/black_history_month' will resolve to 'Black History Month'. """ - name = event.path.split("/")[-1] # Inner-most directory name - words = name.split("_") # Words from snake case + name = event.path.split("/")[-1] # Inner-most directory name. + words = name.split("_") # Words from snake case. return " ".join(word.title() for word in words) @@ -84,44 +84,35 @@ class Branding(commands.Cog): """ Guild branding management. - This cog is responsible for automatic management of the guild's branding while sourcing assets directly from - the branding repository. + Extension responsible for automatic synchronisation of the guild's branding with the branding repository. + Event definitions and assets are automatically discovered and applied as appropriate. - We utilize multiple Redis caches to persist state. As a result, the cog should seamlessly transition across - restarts without having to query either the Discord or GitHub APIs, as it will always remember which - assets are currently applied. + All state is stored in Redis. 
The cog should therefore seamlessly transition across restarts and maintain + a consistent icon rotation schedule for events with multiple icon assets. - Additionally, the state of the icon rotation is persisted. As a result, the rotation doesn't reset unless - the current event or its icons change. + By caching hashes of banner & icon assets, we discover changes in currently applied assets and always keep + the latest version applied. - The cog is designed to be autonomous. The daemon, unless disabled, will poll the branding repository at - midnight every day and respond to detected changes. Since we persist SHA hashes of tracked assets, - changes in an on-going event will trigger automatic resynchronisation. - - A #changelog notification is automatically sent when entering a new event. Changes in the branding of - an on-going event do not trigger a repeated notification. - - The command interface allows moderators+ to control the daemon or request an asset synchronisation, - while regular users can see information about the current event and the overall event schedule. + The command interface allows moderators+ to control the daemon or request asset synchronisation, while + regular users can see information about the current event and the overall event schedule. """ # RedisCache[ - # "daemon_active": If True, daemon auto-starts; controlled via commands (bool) - # "event_path": Path from root in the branding repo (str) - # "event_description": Markdown description (str) - # "event_duration": Human-readable date range or 'Fallback' (str) - # "banner_hash": Hash of the last applied banner (str) - # "icons_hash": Compound hash of icons in rotation (str) - # "last_rotation_timestamp": POSIX timestamp (float) + # "daemon_active": bool | If True, daemon starts on start-up. Controlled via commands. + # "event_path": str | Current event's path in the branding repo. + # "event_description": str | Current event's Markdown description. 
+ # "event_duration": str | Current event's human-readable date range. + # "banner_hash": str | SHA of the currently applied banner. + # "icons_hash": str | Compound SHA of all icons in current rotation. + # "last_rotation_timestamp": float | POSIX UTC timestamp. # ] cache_information = RedisCache() - # Cache holding icons in current rotation ~ the keys are download URLs (str) and the values are integers - # corresponding to the amount of times each icon has been used in the current rotation + # Icons in current rotation. Keys (str) are download URLs, values (int) track the amount of times each + # icon has been used in the current rotation. cache_icons = RedisCache() - # Cache holding all available event names & their durations; this is cached by the daemon and read by - # the calendar command with the intention of preventing API spam; doesn't contain the fallback event + # All available event names & durations. Cached by the daemon nightly; read by the calendar command. cache_events = RedisCache() def __init__(self, bot: Bot) -> None: @@ -129,19 +120,16 @@ class Branding(commands.Cog): self.bot = bot self.repository = BrandingRepository(bot) - self.bot.loop.create_task(self.maybe_start_daemon()) # Start depending on cache + self.bot.loop.create_task(self.maybe_start_daemon()) # Start depending on cache. - # region: Internal utility + # region: Internal logic & state management - @mock_in_debug(return_value=True) + @mock_in_debug(return_value=True) # Mocked in development environment to prevent API spam. async def apply_asset(self, asset_type: AssetType, download_url: str) -> bool: """ Download asset from `download_url` and apply it to PyDis as `asset_type`. - This function is mocked in the development environment in order to prevent API spam during testing. - Decorator should be temporarily removed in order to test internal methodology. - - Returns a boolean indicating whether the application was successful. 
+ Return a boolean indicating whether the application was successful. """ log.info(f"Applying {asset_type.value} asset to the guild") @@ -154,7 +142,7 @@ class Branding(commands.Cog): await self.bot.wait_until_guild_available() pydis: discord.Guild = self.bot.get_guild(Guild.id) - timeout = 10 # Seconds + timeout = 10 # Seconds. try: with async_timeout.timeout(timeout): await pydis.edit(**{asset_type.value: file}) @@ -174,7 +162,7 @@ class Branding(commands.Cog): Banners should always be applied via this method in order to ensure that the last hash is cached. - Returns a boolean indicating whether the application was successful. + Return a boolean indicating whether the application was successful. """ success = await self.apply_asset(AssetType.BANNER, banner.download_url) @@ -194,14 +182,14 @@ class Branding(commands.Cog): In the case that there is only 1 icon in the rotation and has already been applied, do nothing. - Returns a boolean indicating whether a new icon was applied successfully. + Return a boolean indicating whether a new icon was applied successfully. """ log.debug("Rotating icons") state = await self.cache_icons.to_dict() log.trace(f"Total icons in rotation: {len(state)}") - if not state: # This would only happen if rotation not initiated, but we can handle gracefully + if not state: # This would only happen if rotation not initiated, but we can handle gracefully. log.warning("Attempted icon rotation with an empty icon cache!") return False @@ -209,7 +197,7 @@ class Branding(commands.Cog): log.debug("Aborting icon rotation: only 1 icon is available and has already been applied") return False - current_iteration = min(state.values()) # Choose iteration to draw from + current_iteration = min(state.values()) # Choose iteration to draw from. 
options = [download_url for download_url, times_used in state.items() if times_used == current_iteration] log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}") @@ -218,7 +206,7 @@ class Branding(commands.Cog): success = await self.apply_asset(AssetType.ICON, next_icon) if success: - await self.cache_icons.increment(next_icon) # Push the icon into the next iteration + await self.cache_icons.increment(next_icon) # Push the icon into the next iteration. timestamp = datetime.utcnow().timestamp() await self.cache_information.set("last_rotation_timestamp", timestamp) @@ -237,7 +225,7 @@ class Branding(commands.Cog): last_rotation_timestamp = await self.cache_information.get("last_rotation_timestamp") - if last_rotation_timestamp is None: # Maiden case ~ never rotated + if last_rotation_timestamp is None: # Maiden case ~ never rotated. await self.rotate_icons() return @@ -253,9 +241,9 @@ class Branding(commands.Cog): """ Set up a new icon rotation. - This function should be called whenever the set of `available_icons` changes. This is generally the case - when we enter a new event, but potentially also when the assets of an on-going event change. In such cases, - a reset of `cache_icons` is necessary, because it contains download URLs which may have gotten stale. + This function should be called whenever available icons change. This is generally the case when we enter + a new event, but potentially also when the assets of an on-going event change. In such cases, a reset + of `cache_icons` is necessary, because it contains download URLs which may have gotten stale. This function does not upload a new icon! """ @@ -314,25 +302,25 @@ class Branding(commands.Cog): The #changelog notification is sent only if `event` differs from the currently cached event. - Returns a 2-tuple indicating whether the banner, and the icon, were applied successfully. + Return a 2-tuple indicating whether the banner, and the icon, were applied successfully. 
""" log.debug(f"Entering event: {event.path}") - banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly + banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly. - await self.initiate_icon_rotation(event.icons) # Prepare a new rotation - icon_success = await self.rotate_icons() # Apply an icon from the new rotation + await self.initiate_icon_rotation(event.icons) # Prepare a new rotation. + icon_success = await self.rotate_icons() # Apply an icon from the new rotation. - # This will only be False in the case of a manual same-event re-synchronisation + # This will only be False in the case of a manual same-event re-synchronisation. event_changed = event.path != await self.cache_information.get("event_path") - # Cache event identity to avoid re-entry in case of restart + # Cache event identity to avoid re-entry in case of restart. await self.cache_information.set("event_path", event.path) - # Cache information shown in the 'about' embed + # Cache information shown in the 'about' embed. await self.populate_cache_event_description(event) - # Notify guild of new event ~ this reads the information that we cached above! + # Notify guild of new event ~ this reads the information that we cached above. if event_changed: await self.send_info_embed(Channels.change_log) else: @@ -348,7 +336,7 @@ class Branding(commands.Cog): in a recovery scenario. In the usual case, the daemon already has an `Event` instance and can pass it to `enter_event` directly. - Returns a 2-tuple indicating whether the banner, and the icon, were applied successfully. + Return a 2-tuple indicating whether the banner, and the icon, were applied successfully. 
""" log.debug("Synchronise: fetching current event") @@ -380,7 +368,7 @@ class Branding(commands.Cog): log.trace(f"Writing {len(chronological_events)} events (fallback omitted)") - with contextlib.suppress(ValueError): # Cache raises when updated with an empty dict + with contextlib.suppress(ValueError): # Cache raises when updated with an empty dict. await self.cache_events.update({ extract_event_name(event): extract_event_duration(event) for event in chronological_events @@ -407,7 +395,7 @@ class Branding(commands.Cog): """ Start the daemon depending on cache state. - The daemon will only start if it's been previously explicitly enabled via a command. + The daemon will only start if it has been explicitly enabled via a command. """ log.debug("Checking whether daemon is enabled") @@ -452,7 +440,7 @@ class Branding(commands.Cog): await self.enter_event(new_event) return - await self.populate_cache_event_description(new_event) # Cache fresh frontend info in case of change + await self.populate_cache_event_description(new_event) # Cache fresh frontend info in case of change. log.trace("Daemon main: event has not changed, checking for change in assets") @@ -497,7 +485,7 @@ class Branding(commands.Cog): log.trace("Daemon before: calculating time to sleep before loop begins") now = datetime.utcnow() - # The actual midnight moment is offset into the future in order to prevent issues with imprecise sleep + # The actual midnight moment is offset into the future in order to prevent issues with imprecise sleep. 
tomorrow = now + timedelta(days=1) midnight = datetime.combine(tomorrow, time(minute=1)) @@ -517,7 +505,7 @@ class Branding(commands.Cog): @branding_group.command(name="about", aliases=("current", "event")) async def branding_about_cmd(self, ctx: commands.Context) -> None: - """Show the current event description.""" + """Show the current event's description and duration.""" await self.send_info_embed(ctx.channel.id) @commands.has_any_role(*MODERATION_ROLES) @@ -526,7 +514,7 @@ class Branding(commands.Cog): """ Force branding synchronisation. - Shows which assets have failed to synchronise, if any. + Show which assets have failed to synchronise, if any. """ async with ctx.typing(): banner_success, icon_success = await self.synchronise() @@ -565,7 +553,7 @@ class Branding(commands.Cog): """ if ctx.invoked_subcommand: # If you're wondering why this works: when the 'refresh' subcommand eventually re-invokes - # this group, the attribute will be automatically set to None by the framework + # this group, the attribute will be automatically set to None by the framework. return available_events = await self.cache_events.to_dict() @@ -578,10 +566,10 @@ class Branding(commands.Cog): embed = discord.Embed(title="Current event calendar", colour=discord.Colour.blurple()) - # Because a Discord embed can only contain up to 25 fields, we only show the first 25 + # Because Discord embeds can only contain up to 25 fields, we only show the first 25. first_25 = list(available_events.items())[:25] - if len(first_25) != len(available_events): # Alert core devs that a paginating solution is now necessary + if len(first_25) != len(available_events): # Alert core devs that a paginating solution is now necessary. 
log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25!") for name, duration in first_25: diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 420cfb9ea..694e79b51 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -8,21 +8,21 @@ from bot.bot import Bot from bot.constants import Keys from bot.errors import BrandingMisconfiguration -# Base URL for requests into the branding repository +# Base URL for requests into the branding repository. BRANDING_URL = "https://api.github.com/repos/kwzrd/pydis-branding/contents" -PARAMS = {"ref": "kwzrd/events-rework"} # Target branch -HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3 +PARAMS = {"ref": "kwzrd/events-rework"} # Target branch. +HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3. -# A GitHub token is not necessary for the cog to operate, unauthorized requests are however limited to 60 per hour +# A GitHub token is not necessary. However, unauthorized requests are limited to 60 per hour. if Keys.github: HEADERS["Authorization"] = f"token {Keys.github}" -# Since event periods are year-agnostic, we parse them into `datetime` objects with a manually inserted year -# Please note that this is intentionally a leap year in order to allow Feb 29 to be valid +# Since event periods are year-agnostic, we parse them into `datetime` objects with a manually inserted year. +# Please note that this is intentionally a leap year in order to allow Feb 29 to be valid. ARBITRARY_YEAR = 2020 -# Format used to parse date strings after we inject `ARBITRARY_YEAR` at the end +# Format used to parse date strings after we inject `ARBITRARY_YEAR` at the end. 
DATE_FMT = "%B %d %Y" # Ex: July 10 2020 log = logging.getLogger(__name__) @@ -30,15 +30,15 @@ log = logging.getLogger(__name__) class RemoteObject: """ - Represent a remote file or directory on GitHub. + Remote file or directory on GitHub. The annotations match keys in the response JSON that we're interested in. """ - sha: str # Hash helps us detect asset change - name: str # Filename - path: str # Path from repo root - type: str # Either 'file' or 'dir' + sha: str # Hash helps us detect asset change. + name: str # Filename. + path: str # Path from repo root. + type: str # Either 'file' or 'dir'. download_url: t.Optional[str] # If type is 'dir', this is None! def __init__(self, dictionary: t.Dict[str, t.Any]) -> None: @@ -51,18 +51,18 @@ class RemoteObject: class MetaFile(t.NamedTuple): - """Composition of attributes defined in a 'meta.md' file.""" + """Attributes defined in a 'meta.md' file.""" is_fallback: bool start_date: t.Optional[date] end_date: t.Optional[date] - description: str # Markdown event description + description: str # Markdown event description. class Event(t.NamedTuple): - """Represent an event defined in the branding repository.""" + """Event defined in the branding repository.""" - path: str # Path from repo root where event lives + path: str # Path from repo root where event lives. This is the event's identity. meta: MetaFile banner: RemoteObject icons: t.List[RemoteObject] @@ -75,15 +75,12 @@ class BrandingRepository: """ Branding repository abstraction. - This class represents the branding repository's main branch and exposes available events and assets as objects. + This class represents the branding repository's main branch and exposes available events and assets + as objects. It performs the necessary amount of validation to ensure that a misconfigured event + isn't returned. Such events are simply ignored, and will be substituted with the fallback event, + if available. 
Warning logs will inform core developers if a misconfigured event is encountered. - The API is primarily formed by the `get_current_event` function. It performs the necessary amount of validation - to ensure that a misconfigured event isn't returned. Such events are simply ignored, and will be substituted - with the fallback event, if available. - - Warning logs will inform core developers if a misconfigured event is encountered. - - Colliding events cause no special behaviour - in such cases, the first found active event is returned. + Colliding events cause no special behaviour. In such cases, the first found active event is returned. We work with the assumption that the branding repository checks for such conflicts and prevents them from reaching the main branch. @@ -100,10 +97,9 @@ class BrandingRepository: """ Fetch directory found at `path` in the branding repository. - The directory will be represented by a mapping from file or sub-directory names to their corresponding - instances of `RemoteObject`. Passing a custom `types` value allows only getting files or directories. + Raise an exception if the request fails, or if the response lacks the expected keys. - An exception will be raised if the request fails, or if the response lacks the expected keys. + Passing custom `types` allows getting only files or directories. By default, both are included. """ full_url = f"{BRANDING_URL}/{path}" log.debug(f"Fetching directory from branding repository: {full_url}") @@ -148,8 +144,8 @@ class BrandingRepository: if None in (start_date_raw, end_date_raw): raise BrandingMisconfiguration("Non-fallback event doesn't have start and end dates defined!") - # We extend the configured month & day with an arbitrary leap year to allow a `datetime` repr to exist - # This may raise errors if configured in a wrong format ~ we let the caller handle such cases + # We extend the configured month & day with an arbitrary leap year, allowing a datetime object to exist. 
+ # This may raise errors if misconfigured. We let the caller handle such cases. start_date = datetime.strptime(f"{start_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date() end_date = datetime.strptime(f"{end_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date() @@ -183,13 +179,12 @@ class BrandingRepository: """ Discover available events in the branding repository. - Misconfigured events are skipped, the return value may therefore not contain a representation of each - directory in the repository. May return an empty list in the catastrophic case. + Misconfigured events are skipped. May return an empty list in the catastrophic case. """ log.debug("Discovering events in branding repository") try: - event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files + event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files. except Exception as fetch_exc: log.error(f"Failed to fetch 'events' directory: {fetch_exc}") return [] @@ -220,7 +215,7 @@ class BrandingRepository: utc_now = datetime.utcnow() log.debug(f"Finding active event for: {utc_now}") - # As all events exist in the arbitrary year, we construct a separate object for the purposes of comparison + # Construct an object in the arbitrary year for the purpose of comparison. lookup_now = date(year=ARBITRARY_YEAR, month=utc_now.month, day=utc_now.day) available_events = await self.get_events() -- cgit v1.2.3 From 0e8ba3462050e79020ffce0bb6e0bbaf788ecc07 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 27 Mar 2021 14:17:21 +0100 Subject: Branding: revise log messages & levels Logs are now proper sentences ended with full stops. Exceptions are logged with full tracebacks, and log level are revised to be more sensible and consistent across the extension. 
--- bot/exts/backend/branding/_cog.py | 78 ++++++++++++++++---------------- bot/exts/backend/branding/_repository.py | 21 +++++---- 2 files changed, 50 insertions(+), 49 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index c7d326da3..38ec279cd 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -131,12 +131,12 @@ class Branding(commands.Cog): Return a boolean indicating whether the application was successful. """ - log.info(f"Applying {asset_type.value} asset to the guild") + log.info(f"Applying '{asset_type.value}' asset to the guild.") try: file = await self.repository.fetch_file(download_url) - except Exception as fetch_exc: - log.error(f"Failed to fetch '{asset_type.value}' asset: {fetch_exc}") + except Exception: + log.exception(f"Failed to fetch '{asset_type.value}' asset.") return False await self.bot.wait_until_guild_available() @@ -146,14 +146,14 @@ class Branding(commands.Cog): try: with async_timeout.timeout(timeout): await pydis.edit(**{asset_type.value: file}) - except discord.HTTPException as http_exc: - log.error(f"Asset upload to Discord failed: {http_exc}") + except discord.HTTPException: + log.exception("Asset upload to Discord failed.") return False except asyncio.TimeoutError: - log.error(f"Asset upload to Discord timed out after {timeout} seconds!") + log.error(f"Asset upload to Discord timed out after {timeout} seconds.") return False else: - log.debug("Asset uploaded successfully!") + log.trace("Asset uploaded successfully.") return True async def apply_banner(self, banner: RemoteObject) -> bool: @@ -184,23 +184,23 @@ class Branding(commands.Cog): Return a boolean indicating whether a new icon was applied successfully. 
""" - log.debug("Rotating icons") + log.debug("Rotating icons.") state = await self.cache_icons.to_dict() - log.trace(f"Total icons in rotation: {len(state)}") + log.trace(f"Total icons in rotation: {len(state)}.") if not state: # This would only happen if rotation not initiated, but we can handle gracefully. - log.warning("Attempted icon rotation with an empty icon cache!") + log.warning("Attempted icon rotation with an empty icon cache. This indicates wrong logic.") return False if len(state) == 1 and 1 in state.values(): - log.debug("Aborting icon rotation: only 1 icon is available and has already been applied") + log.debug("Aborting icon rotation: only 1 icon is available and has already been applied.") return False current_iteration = min(state.values()) # Choose iteration to draw from. options = [download_url for download_url, times_used in state.items() if times_used == current_iteration] - log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}") + log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}.") next_icon = random.choice(options) success = await self.apply_asset(AssetType.ICON, next_icon) @@ -221,7 +221,7 @@ class Branding(commands.Cog): Because there is work to be done before the timestamp is read and written, the next read will likely commence slightly under 24 hours after the last write. 
""" - log.debug("Checking if icons should rotate") + log.debug("Checking whether it's time for icons to rotate.") last_rotation_timestamp = await self.cache_information.get("last_rotation_timestamp") @@ -232,7 +232,7 @@ class Branding(commands.Cog): last_rotation = datetime.fromtimestamp(last_rotation_timestamp) difference = (datetime.utcnow() - last_rotation) + timedelta(minutes=5) - log.trace(f"Icons last rotated at {last_rotation} (difference: {difference})") + log.trace(f"Icons last rotated at {last_rotation} (difference: {difference}).") if difference.days >= BrandingConfig.cycle_frequency: await self.rotate_icons() @@ -247,14 +247,14 @@ class Branding(commands.Cog): This function does not upload a new icon! """ - log.debug("Initiating new icon rotation") + log.debug("Initiating new icon rotation.") await self.cache_icons.clear() new_state = {icon.download_url: 0 for icon in available_icons} await self.cache_icons.update(new_state) - log.trace(f"Icon rotation initiated for {len(new_state)} icons") + log.trace(f"Icon rotation initiated for {len(new_state)} icons.") await self.cache_information.set("icons_hash", compound_hash(available_icons)) @@ -268,7 +268,7 @@ class Branding(commands.Cog): To support either case, we read information about the current event from `cache_information`. The caller is therefore responsible for making sure that the cache is up-to-date before calling this function. 
""" - log.debug(f"Sending event information event to channel id: {channel_id}") + log.debug(f"Sending event information event to channel id: {channel_id}.") await self.bot.wait_until_guild_available() channel: t.Optional[discord.TextChannel] = self.bot.get_channel(channel_id) @@ -277,7 +277,7 @@ class Branding(commands.Cog): log.warning(f"Cannot send event information: channel {channel_id} not found!") return - log.debug(f"Destination channel: #{channel.name}") + log.trace(f"Destination channel: #{channel.name}.") description = await self.cache_information.get("event_description") duration = await self.cache_information.get("event_duration") @@ -304,7 +304,7 @@ class Branding(commands.Cog): Return a 2-tuple indicating whether the banner, and the icon, were applied successfully. """ - log.debug(f"Entering event: {event.path}") + log.info(f"Entering event: '{event.path}'.") banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly. @@ -324,7 +324,7 @@ class Branding(commands.Cog): if event_changed: await self.send_info_embed(Channels.change_log) else: - log.trace("Omitted #changelog notification as event has not changed (indicating manual re-sync)") + log.trace("Omitted #changelog notification as event has not changed. Assuming manual re-sync.") return banner_success, icon_success @@ -338,14 +338,14 @@ class Branding(commands.Cog): Return a 2-tuple indicating whether the banner, and the icon, were applied successfully. """ - log.debug("Synchronise: fetching current event") + log.debug("Synchronise: fetching current event.") current_event, available_events = await self.repository.get_current_event() await self.populate_cache_events(available_events) if current_event is None: - log.error("Failed to fetch event ~ cannot synchronise!") + log.error("Failed to fetch event. 
Cannot synchronise!") return False, False return await self.enter_event(current_event) @@ -359,14 +359,14 @@ class Branding(commands.Cog): The cache does not store the fallback event, as it is not shown in the calendar. """ - log.debug("Populating events cache") + log.debug("Populating events cache.") await self.cache_events.clear() no_fallback = [event for event in events if not event.meta.is_fallback] chronological_events = sorted(no_fallback, key=attrgetter("meta.start_date")) - log.trace(f"Writing {len(chronological_events)} events (fallback omitted)") + log.trace(f"Writing {len(chronological_events)} events (fallback omitted).") with contextlib.suppress(ValueError): # Cache raises when updated with an empty dict. await self.cache_events.update({ @@ -383,7 +383,7 @@ class Branding(commands.Cog): The duration is stored formatted for the frontend. It is not intended to be used programmatically. """ - log.trace("Caching event description & duration") + log.debug("Caching event description & duration.") await self.cache_information.set("event_description", event.meta.description) await self.cache_information.set("event_duration", extract_event_duration(event)) @@ -397,7 +397,7 @@ class Branding(commands.Cog): The daemon will only start if it has been explicitly enabled via a command. """ - log.debug("Checking whether daemon is enabled") + log.debug("Checking whether daemon should start.") should_begin: t.Optional[bool] = await self.cache_information.get("daemon_active") # None if never set! @@ -410,7 +410,7 @@ class Branding(commands.Cog): This is **not** done automatically! The daemon otherwise remains active in the background. """ - log.debug("Cog unload: cancelling daemon") + log.debug("Cog unload: cancelling daemon.") self.daemon_loop.cancel() @@ -425,14 +425,14 @@ class Branding(commands.Cog): we check the banner & icons hashes against the currently cached values. If there is a mismatch, each specific asset is re-applied. 
""" - log.trace("Daemon main: checking current event") + log.info("Daemon main: checking current event.") new_event, available_events = await self.repository.get_current_event() await self.populate_cache_events(available_events) if new_event is None: - log.warning("Daemon main: failed to get current event from branding repository, will do nothing") + log.warning("Daemon main: failed to get current event from branding repository, will do nothing.") return if new_event.path != await self.cache_information.get("event_path"): @@ -442,14 +442,14 @@ class Branding(commands.Cog): await self.populate_cache_event_description(new_event) # Cache fresh frontend info in case of change. - log.trace("Daemon main: event has not changed, checking for change in assets") + log.trace("Daemon main: event has not changed, checking for change in assets.") if new_event.banner.sha != await self.cache_information.get("banner_hash"): - log.debug("Daemon main: detected banner change!") + log.debug("Daemon main: detected banner change.") await self.apply_banner(new_event.banner) if compound_hash(new_event.icons) != await self.cache_information.get("icons_hash"): - log.debug("Daemon main: detected icon change!") + log.debug("Daemon main: detected icon change.") await self.initiate_icon_rotation(new_event.icons) await self.rotate_icons() else: @@ -463,7 +463,7 @@ class Branding(commands.Cog): The scheduler maintains an exact 24-hour frequency even if this coroutine takes time to complete. If the coroutine is started at 00:01 and completes at 00:05, it will still be started at 00:01 the next day. """ - log.trace("Daemon loop: calling daemon main") + log.trace("Daemon loop: calling daemon main.") try: await self.daemon_main() @@ -478,11 +478,11 @@ class Branding(commands.Cog): The first iteration is invoked directly such that synchronisation happens immediately after daemon start. We then calculate the time until the next-up midnight and sleep before letting `daemon_loop` begin. 
""" - log.trace("Daemon before: performing start-up iteration") + log.trace("Daemon before: performing start-up iteration.") await self.daemon_loop() - log.trace("Daemon before: calculating time to sleep before loop begins") + log.trace("Daemon before: calculating time to sleep before loop begins.") now = datetime.utcnow() # The actual midnight moment is offset into the future in order to prevent issues with imprecise sleep. @@ -490,7 +490,7 @@ class Branding(commands.Cog): midnight = datetime.combine(tomorrow, time(minute=1)) sleep_secs = (midnight - now).total_seconds() - log.trace(f"Daemon before: sleeping {sleep_secs} seconds before next-up midnight: {midnight}") + log.trace(f"Daemon before: sleeping {sleep_secs} seconds before next-up midnight: {midnight}.") await asyncio.sleep(sleep_secs) @@ -557,7 +557,7 @@ class Branding(commands.Cog): return available_events = await self.cache_events.to_dict() - log.debug(f"Found {len(available_events)} cached events available for calendar view") + log.trace(f"Found {len(available_events)} cached events available for calendar view.") if not available_events: resp = make_embed("No events found!", "Cache may be empty, try `branding calendar refresh`.", success=False) @@ -570,7 +570,7 @@ class Branding(commands.Cog): first_25 = list(available_events.items())[:25] if len(first_25) != len(available_events): # Alert core devs that a paginating solution is now necessary. - log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25!") + log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25.") for name, duration in first_25: embed.add_field(name=name[:256], value=duration[:1024]) @@ -588,7 +588,7 @@ class Branding(commands.Cog): Supplementary subcommand allowing force-refreshing the event cache. Implemented as a subcommand because unlike the supergroup, it requires moderator privileges. 
""" - log.debug("Performing command-requested event cache refresh") + log.info("Performing command-requested event cache refresh.") async with ctx.typing(): available_events = await self.repository.get_events() diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 694e79b51..3a9745ed5 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -102,7 +102,7 @@ class BrandingRepository: Passing custom `types` allows getting only files or directories. By default, both are included. """ full_url = f"{BRANDING_URL}/{path}" - log.debug(f"Fetching directory from branding repository: {full_url}") + log.debug(f"Fetching directory from branding repository: '{full_url}'.") async with self.bot.http_session.get(full_url, params=PARAMS, headers=HEADERS) as response: if response.status != 200: @@ -117,7 +117,7 @@ class BrandingRepository: Raise an exception if the request does not succeed. """ - log.debug(f"Fetching file from branding repository: {download_url}") + log.debug(f"Fetching file from branding repository: '{download_url}'.") async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response: if response.status != 200: @@ -181,26 +181,25 @@ class BrandingRepository: Misconfigured events are skipped. May return an empty list in the catastrophic case. """ - log.debug("Discovering events in branding repository") + log.debug("Discovering events in branding repository.") try: event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files. 
- except Exception as fetch_exc: - log.error(f"Failed to fetch 'events' directory: {fetch_exc}") + except Exception: + log.exception("Failed to fetch 'events' directory.") return [] instances: t.List[Event] = [] for event_directory in event_directories.values(): - log.trace(f"Attempting to construct event from directory: {event_directory.path}") + log.trace(f"Attempting to construct event from directory: '{event_directory.path}'.") try: instance = await self.construct_event(event_directory) except Exception as exc: - log.warning(f"Could not construct event '{event_directory.path}': {exc}") + log.warning(f"Could not construct event '{event_directory.path}'.", exc_info=exc) else: instances.append(instance) - log.trace(f"Found {len(instances)} correctly configured events") return instances async def get_current_event(self) -> t.Tuple[t.Optional[Event], t.List[Event]]: @@ -213,19 +212,21 @@ class BrandingRepository: The current event may be None in the case that no event is active, and no fallback event is found. """ utc_now = datetime.utcnow() - log.debug(f"Finding active event for: {utc_now}") + log.debug(f"Finding active event for: {utc_now}.") # Construct an object in the arbitrary year for the purpose of comparison. lookup_now = date(year=ARBITRARY_YEAR, month=utc_now.month, day=utc_now.day) + log.trace(f"Lookup object in arbitrary year: {lookup_now}.") available_events = await self.get_events() + log.trace(f"Found {len(available_events)} available events.") for event in available_events: meta = event.meta if not meta.is_fallback and (meta.start_date <= lookup_now <= meta.end_date): return event, available_events - log.debug("No active event found, looking for fallback") + log.trace("No active event found. 
Looking for fallback event.") for event in available_events: if event.meta.is_fallback: -- cgit v1.2.3 From f9f9ced5320a593bb59836086d0d5983b4df58df Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sat, 27 Mar 2021 13:17:12 -0400 Subject: Restrict redirects for paste uploads with an extension that is not `.py`. --- bot/utils/services.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bot/utils/services.py b/bot/utils/services.py index 5949c9e48..68cbd896e 100644 --- a/bot/utils/services.py +++ b/bot/utils/services.py @@ -47,7 +47,13 @@ async def send_to_paste_service(contents: str, *, extension: str = "") -> Option continue elif "key" in response_json: log.info(f"Successfully uploaded contents to paste service behind key {response_json['key']}.") - return URLs.paste_service.format(key=response_json['key']) + extension + + paste_link = URLs.paste_service.format(key=response_json['key']) + extension + + if extension == '.py': + return paste_link + return paste_link + "?noredirect" + log.warning( f"Got unexpected JSON response from paste service: {response_json}\n" f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." -- cgit v1.2.3 From 2abbcc8a63d0378392eba52593a65249d9204e9e Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sat, 27 Mar 2021 13:26:25 -0400 Subject: Remove the old DMRelay cog. The moderation team has decided that this may come in conflict with Discord's ToS, and it does not serve too much of a purpose anymore. It was fun while it lasted! 
--- bot/exts/moderation/dm_relay.py | 132 ---------------------------------------- 1 file changed, 132 deletions(-) delete mode 100644 bot/exts/moderation/dm_relay.py diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py deleted file mode 100644 index 6d081741c..000000000 --- a/bot/exts/moderation/dm_relay.py +++ /dev/null @@ -1,132 +0,0 @@ -import logging -from typing import Optional - -import discord -from async_rediscache import RedisCache -from discord import Color -from discord.ext import commands -from discord.ext.commands import Cog - -from bot import constants -from bot.bot import Bot -from bot.converters import UserMentionOrID -from bot.utils.checks import in_whitelist_check -from bot.utils.messages import send_attachments -from bot.utils.webhooks import send_webhook - -log = logging.getLogger(__name__) - - -class DMRelay(Cog): - """Relay direct messages to and from the bot.""" - - # RedisCache[str, t.Union[discord.User.id, discord.Member.id]] - dm_cache = RedisCache() - - def __init__(self, bot: Bot): - self.bot = bot - self.webhook_id = constants.Webhooks.dm_log - self.webhook = None - self.bot.loop.create_task(self.fetch_webhook()) - - @commands.command(aliases=("reply",)) - async def send_dm(self, ctx: commands.Context, member: Optional[UserMentionOrID], *, message: str) -> None: - """ - Allows you to send a DM to a user from the bot. - - If `member` is not provided, it will send to the last user who DM'd the bot. - - This feature should be used extremely sparingly. Use ModMail if you need to have a serious - conversation with a user. This is just for responding to extraordinary DMs, having a little - fun with users, and telling people they are DMing the wrong bot. - - NOTE: This feature will be removed if it is overused. 
- """ - if not member: - user_id = await self.dm_cache.get("last_user") - member = ctx.guild.get_member(user_id) if user_id else None - - # If we still don't have a Member at this point, give up - if not member: - log.debug("This bot has never gotten a DM, or the RedisCache has been cleared.") - await ctx.message.add_reaction("❌") - return - - if member.id == self.bot.user.id: - log.debug("Not sending message to bot user") - return await ctx.send("🚫 I can't send messages to myself!") - - try: - await member.send(message) - except discord.errors.Forbidden: - log.debug("User has disabled DMs.") - await ctx.message.add_reaction("❌") - else: - await ctx.message.add_reaction("✅") - self.bot.stats.incr("dm_relay.dm_sent") - - async def fetch_webhook(self) -> None: - """Fetches the webhook object, so we can post to it.""" - await self.bot.wait_until_guild_available() - - try: - self.webhook = await self.bot.fetch_webhook(self.webhook_id) - except discord.HTTPException: - log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`") - - @Cog.listener() - async def on_message(self, message: discord.Message) -> None: - """Relays the message's content and attachments to the dm_log channel.""" - # Only relay DMs from humans - if message.author.bot or message.guild or self.webhook is None: - return - - if message.clean_content: - await send_webhook( - webhook=self.webhook, - content=message.clean_content, - username=f"{message.author.display_name} ({message.author.id})", - avatar_url=message.author.avatar_url - ) - await self.dm_cache.set("last_user", message.author.id) - self.bot.stats.incr("dm_relay.dm_received") - - # Handle any attachments - if message.attachments: - try: - await send_attachments( - message, - self.webhook, - username=f"{message.author.display_name} ({message.author.id})" - ) - except (discord.errors.Forbidden, discord.errors.NotFound): - e = discord.Embed( - description=":x: **This message contained an attachment, but it could not be retrieved**", 
- color=Color.red() - ) - await send_webhook( - webhook=self.webhook, - embed=e, - username=f"{message.author.display_name} ({message.author.id})", - avatar_url=message.author.avatar_url - ) - except discord.HTTPException: - log.exception("Failed to send an attachment to the webhook") - - async def cog_check(self, ctx: commands.Context) -> bool: - """Only allow moderators to invoke the commands in this cog.""" - checks = [ - await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx), - in_whitelist_check( - ctx, - channels=[constants.Channels.dm_log], - redirect=None, - fail_silently=True, - ) - ] - return all(checks) - - -def setup(bot: Bot) -> None: - """Load the DMRelay cog.""" - bot.add_cog(DMRelay(bot)) -- cgit v1.2.3 From 2759409123d458a4a0a274b835bebb3cc728b83a Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sat, 27 Mar 2021 13:49:22 -0400 Subject: Fix tests for paste uploads. Accounts for no redirects on extensions that are not `.py`. --- tests/bot/utils/test_services.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/bot/utils/test_services.py b/tests/bot/utils/test_services.py index 1b48f6560..3b71022db 100644 --- a/tests/bot/utils/test_services.py +++ b/tests/bot/utils/test_services.py @@ -30,9 +30,9 @@ class PasteTests(unittest.IsolatedAsyncioTestCase): """Url with specified extension is returned on successful requests.""" key = "paste_key" test_cases = ( - (f"https://paste_service.com/{key}.txt", "txt"), + (f"https://paste_service.com/{key}.txt?noredirect", "txt"), (f"https://paste_service.com/{key}.py", "py"), - (f"https://paste_service.com/{key}", ""), + (f"https://paste_service.com/{key}?noredirect", ""), ) response = MagicMock( json=AsyncMock(return_value={"key": key}) -- cgit v1.2.3 From e85988e8d63af2a30835a72ec363895b60f22260 Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sat, 27 Mar 2021 13:51:12 -0400 Subject: Create the new DMRelay cog. 
Includes the `!dmrelay` command, allowing moderators to relay direct messages between the bot and other users. --- bot/exts/moderation/dm_relay.py | 59 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 bot/exts/moderation/dm_relay.py diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py new file mode 100644 index 000000000..2bf2391a4 --- /dev/null +++ b/bot/exts/moderation/dm_relay.py @@ -0,0 +1,59 @@ +import logging +import textwrap + +import discord +from discord.ext.commands import Cog, Context, command + +from bot.bot import Bot +from bot.constants import Emojis +from bot.utils.services import send_to_paste_service + +log = logging.getLogger(__name__) + + +class DMRelay(Cog): + """Relay direct messages from the bot.""" + + def __init__(self, bot: Bot): + self.bot = bot + + @command(aliases=("relay", "dr")) + async def dmrelay(self, ctx: Context, user: discord.User, limit: int = 100) -> None: + """Relays the direct message history between the bot and given user.""" + log.trace(f"Relaying DMs with {user.name} ({user.id})") + + if not user.dm_channel: + await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.") + return + + output = textwrap.dedent(f"""\ + User: {user} ({user.id}) + Channel ID: {user.dm_channel.id}\n + """) + + async for msg in user.history(limit=limit, oldest_first=True): + created_at = msg.created_at.strftime(r"%Y-%m-%d %H:%M") + + # Metadata (author, created_at, id) + output += f"{msg.author} [{created_at}] ({msg.id}): " + + # Content + if msg.content: + output += msg.content + "\n" + + # Embeds + if (embeds := len(msg.embeds)) > 0: + output += f"<{embeds} embed{'s' if embeds > 1 else ''}>\n" + + # Attachments + attachments = "\n".join(a.url for a in msg.attachments) + if attachments: + output += attachments + "\n" + + paste_link = await send_to_paste_service(output, extension="txt") + await ctx.send(paste_link) + + +def setup(bot: Bot) -> None: + 
"""Load the DMRelay cog.""" + bot.add_cog(DMRelay(bot)) -- cgit v1.2.3 From 4be90b3c454138e3548c7394fcb2a1182b05b7d7 Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sat, 27 Mar 2021 13:52:43 -0400 Subject: Restrict DMRelay cog to moderators only. --- bot/exts/moderation/dm_relay.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index 2bf2391a4..1d57862d9 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -2,10 +2,10 @@ import logging import textwrap import discord -from discord.ext.commands import Cog, Context, command +from discord.ext.commands import Cog, Context, command, has_any_role from bot.bot import Bot -from bot.constants import Emojis +from bot.constants import Emojis, MODERATION_ROLES from bot.utils.services import send_to_paste_service log = logging.getLogger(__name__) @@ -53,6 +53,10 @@ class DMRelay(Cog): paste_link = await send_to_paste_service(output, extension="txt") await ctx.send(paste_link) + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators to invoke the commands in this cog.""" + return await has_any_role(*MODERATION_ROLES).predicate(ctx) + def setup(bot: Bot) -> None: """Load the DMRelay cog.""" -- cgit v1.2.3 From 721068e77cb0888feba465d0ba39a58aab12a7bf Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 27 Mar 2021 19:40:08 +0100 Subject: Branding: omit notification when entering evergreen The fallback event should not produce a notification. --- bot/exts/backend/branding/_cog.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 38ec279cd..e12706c32 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -300,7 +300,7 @@ class Branding(commands.Cog): An event change should always be handled via this function, as it ensures that the cache is populated. 
- The #changelog notification is sent only if `event` differs from the currently cached event. + The #changelog notification is omitted when `event` is fallback, or already applied. Return a 2-tuple indicating whether the banner, and the icon, were applied successfully. """ @@ -321,10 +321,10 @@ class Branding(commands.Cog): await self.populate_cache_event_description(event) # Notify guild of new event ~ this reads the information that we cached above. - if event_changed: + if event_changed and not event.meta.is_fallback: await self.send_info_embed(Channels.change_log) else: - log.trace("Omitted #changelog notification as event has not changed. Assuming manual re-sync.") + log.trace("Omitting #changelog notification. Event has not changed, or new event is fallback.") return banner_success, icon_success -- cgit v1.2.3 From 1e8d5977084085f37395f10e320931305c3b7cca Mon Sep 17 00:00:00 2001 From: kwzrd Date: Sat, 27 Mar 2021 19:46:49 +0100 Subject: Branding: add contextual message to #changelog notifications It would be strange to just send the embed with no explanation of what it means. --- bot/exts/backend/branding/_cog.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index e12706c32..b07edbffd 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -258,17 +258,16 @@ class Branding(commands.Cog): await self.cache_information.set("icons_hash", compound_hash(available_icons)) - async def send_info_embed(self, channel_id: int) -> None: + async def send_info_embed(self, channel_id: int, *, is_notification: bool) -> None: """ Send the currently cached event description to `channel_id`. - This function is called when entering a new event with the destination being #changelog. However, it can - also be invoked on-demand by users. + When `is_notification` holds, a short contextual message for the #changelog channel is added. 
- To support either case, we read information about the current event from `cache_information`. The caller - is therefore responsible for making sure that the cache is up-to-date before calling this function. + We read event information from `cache_information`. The caller is therefore responsible for making + sure that the cache is up-to-date before calling this function. """ - log.debug(f"Sending event information event to channel id: {channel_id}.") + log.debug(f"Sending event information event to channel: {channel_id} ({is_notification=}).") await self.bot.wait_until_guild_available() channel: t.Optional[discord.TextChannel] = self.bot.get_channel(channel_id) @@ -283,12 +282,15 @@ class Branding(commands.Cog): duration = await self.cache_information.get("event_duration") if None in (description, duration): + content = None embed = make_embed("No event in cache", "Is the daemon enabled?", success=False) + else: + content = "Python Discord is entering a new event!" if is_notification else None embed = discord.Embed(description=description[:2048], colour=discord.Colour.blurple()) embed.set_footer(text=duration[:2048]) - await channel.send(embed=embed) + await channel.send(content=content, embed=embed) async def enter_event(self, event: Event) -> t.Tuple[bool, bool]: """ @@ -322,7 +324,7 @@ class Branding(commands.Cog): # Notify guild of new event ~ this reads the information that we cached above. if event_changed and not event.meta.is_fallback: - await self.send_info_embed(Channels.change_log) + await self.send_info_embed(Channels.change_log, is_notification=True) else: log.trace("Omitting #changelog notification. 
Event has not changed, or new event is fallback.") @@ -506,7 +508,7 @@ class Branding(commands.Cog): @branding_group.command(name="about", aliases=("current", "event")) async def branding_about_cmd(self, ctx: commands.Context) -> None: """Show the current event's description and duration.""" - await self.send_info_embed(ctx.channel.id) + await self.send_info_embed(ctx.channel.id, is_notification=False) @commands.has_any_role(*MODERATION_ROLES) @branding_group.command(name="sync") -- cgit v1.2.3 From 459ce9220ab8659d545e32f4ef1532da50789ca7 Mon Sep 17 00:00:00 2001 From: Dennis Pham Date: Sun, 28 Mar 2021 11:52:41 -0400 Subject: Added a newline to space out some code. Co-authored-by: Joe Banks <20439493+jb3@users.noreply.github.com> --- bot/utils/services.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/utils/services.py b/bot/utils/services.py index 68cbd896e..db9c93d0f 100644 --- a/bot/utils/services.py +++ b/bot/utils/services.py @@ -52,6 +52,7 @@ async def send_to_paste_service(contents: str, *, extension: str = "") -> Option if extension == '.py': return paste_link + return paste_link + "?noredirect" log.warning( -- cgit v1.2.3 From 8cc27e52735d03273267012c0344acc54c602ea9 Mon Sep 17 00:00:00 2001 From: Dennis Pham Date: Sun, 28 Mar 2021 11:53:41 -0400 Subject: Improve `DMRelay` cog description. 
Co-authored-by: Joe Banks <20439493+jb3@users.noreply.github.com> --- bot/exts/moderation/dm_relay.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index 1d57862d9..2b897c0ed 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -12,7 +12,7 @@ log = logging.getLogger(__name__) class DMRelay(Cog): - """Relay direct messages from the bot.""" + """Inspect messages sent to the bot.""" def __init__(self, bot: Bot): self.bot = bot -- cgit v1.2.3 From 14514d1ed15e9155ef8700473cd0953126421177 Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sun, 28 Mar 2021 11:58:01 -0400 Subject: Account for requesting the bot's DMs with itself. --- bot/exts/moderation/dm_relay.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index 1d57862d9..ed1c45292 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -22,7 +22,7 @@ class DMRelay(Cog): """Relays the direct message history between the bot and given user.""" log.trace(f"Relaying DMs with {user.name} ({user.id})") - if not user.dm_channel: + if self.bot.user == user or not user.dm_channel: await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.") return -- cgit v1.2.3 From d36e179912242ea6c21a1d5e1a4485034a1b5343 Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sun, 28 Mar 2021 12:12:41 -0400 Subject: Force cache to update for user history. Before, the user would have to send a DM to the bot after startup for the bot to realize there is DM history with that specific user. Now, we force a cache refresh when a moderator invokes `!dmrelay`, so this shouldn't be an issue anymore. 
--- bot/exts/moderation/dm_relay.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index ed1c45292..17316ff48 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -22,7 +22,14 @@ class DMRelay(Cog): """Relays the direct message history between the bot and given user.""" log.trace(f"Relaying DMs with {user.name} ({user.id})") - if self.bot.user == user or not user.dm_channel: + if self.bot.user == user: + await ctx.send(f"{Emojis.cross_mark} No direct message history with myself.") + return + + # Force cache to update + await user.history(limit=1).flatten() + + if not user.dm_channel: await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.") return -- cgit v1.2.3 From 39d71b578b5f1cfaae2acd01743f8b7522e2c490 Mon Sep 17 00:00:00 2001 From: Den4200 Date: Sun, 28 Mar 2021 13:19:40 -0400 Subject: Reduce API calls in `!dmrelay`. --- bot/exts/moderation/dm_relay.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index cc63a80fe..a03230b3d 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -22,22 +22,11 @@ class DMRelay(Cog): """Relays the direct message history between the bot and given user.""" log.trace(f"Relaying DMs with {user.name} ({user.id})") - if self.bot.user == user: - await ctx.send(f"{Emojis.cross_mark} No direct message history with myself.") + if user.bot: + await ctx.send(f"{Emojis.cross_mark} No direct message history with bots.") return - # Force cache to update - await user.history(limit=1).flatten() - - if not user.dm_channel: - await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.") - return - - output = textwrap.dedent(f"""\ - User: {user} ({user.id}) - Channel ID: {user.dm_channel.id}\n - """) - + output = "" async for 
msg in user.history(limit=limit, oldest_first=True): created_at = msg.created_at.strftime(r"%Y-%m-%d %H:%M") @@ -57,7 +46,16 @@ class DMRelay(Cog): if attachments: output += attachments + "\n" - paste_link = await send_to_paste_service(output, extension="txt") + if not output: + await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.") + return + + metadata = textwrap.dedent(f"""\ + User: {user} ({user.id}) + Channel ID: {user.dm_channel.id}\n + """) + + paste_link = await send_to_paste_service(metadata + output, extension="txt") await ctx.send(paste_link) async def cog_check(self, ctx: Context) -> bool: -- cgit v1.2.3 From 00dd501b99d31b8ca4e6b1edb1638e64abe95ae3 Mon Sep 17 00:00:00 2001 From: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> Date: Sun, 28 Mar 2021 15:15:35 -0400 Subject: Filter codeblick escapes and allow no mentions for !raw command --- bot/exts/info/information.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index c54ca96bf..ce35df470 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -6,7 +6,7 @@ from collections import defaultdict from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple, Union import fuzzywuzzy -from discord import Colour, Embed, Guild, Message, Role +from discord import AllowedMentions, Colour, Embed, Guild, Message, Role from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role from bot import constants @@ -447,9 +447,9 @@ class Information(Cog): def add_content(title: str, content: str) -> None: paginator.add_line(f'== {title} ==\n') - # replace backticks as it breaks out of code blocks. Spaces seemed to be the most reasonable solution. - # we hope it's not close to 2000 - paginator.add_line(content.replace('```', '`` `')) + # Replace backticks as it breaks out of code blocks. 
+ # An invisble character seemed to be the most reasonable solution. We hope it's not close to 2000. + paginator.add_line(content.replace('`', '`\u200b')) paginator.close_page() if message.content: @@ -468,7 +468,7 @@ class Information(Cog): add_content(title, transformer(item)) for page in paginator.pages: - await ctx.send(page) + await ctx.send(page, allowed_mentions=AllowedMentions.none()) @raw.command() async def json(self, ctx: Context, message: Message) -> None: -- cgit v1.2.3 From 59dd861ca822f8dcef4c73732300e0f737b3bfa1 Mon Sep 17 00:00:00 2001 From: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> Date: Sun, 28 Mar 2021 18:12:20 -0400 Subject: Update bot/exts/info/information.py Co-authored-by: Joe Banks <20439493+jb3@users.noreply.github.com> --- bot/exts/info/information.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index ce35df470..0555544ce 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -448,7 +448,7 @@ class Information(Cog): def add_content(title: str, content: str) -> None: paginator.add_line(f'== {title} ==\n') # Replace backticks as it breaks out of code blocks. - # An invisble character seemed to be the most reasonable solution. We hope it's not close to 2000. + # An invisible character seemed to be the most reasonable solution. We hope it's not close to 2000. 
paginator.add_line(content.replace('`', '`\u200b')) paginator.close_page() -- cgit v1.2.3 From 450a205e6115bfa296427d4bf15b839433627878 Mon Sep 17 00:00:00 2001 From: Joe Banks <20439493+jb3@users.noreply.github.com> Date: Mon, 29 Mar 2021 00:04:51 +0100 Subject: Add myself to CODEOWNERS (#1489) --- .github/CODEOWNERS | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 634bb4bca..1df05e990 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,14 +4,14 @@ **/bot/exts/moderation/*silence.py @MarkKoz bot/exts/info/codeblock/** @MarkKoz bot/exts/utils/extensions.py @MarkKoz -bot/exts/utils/snekbox.py @MarkKoz @Akarys42 +bot/exts/utils/snekbox.py @MarkKoz @Akarys42 @jb3 bot/exts/help_channels/** @MarkKoz @Akarys42 -bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 -bot/exts/info/** @Akarys42 @Den4200 -bot/exts/info/information.py @mbaruh -bot/exts/filters/** @mbaruh +bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 @jb3 +bot/exts/info/** @Akarys42 @Den4200 @jb3 +bot/exts/info/information.py @mbaruh @jb3 +bot/exts/filters/** @mbaruh @jb3 bot/exts/fun/** @ks129 -bot/exts/utils/** @ks129 +bot/exts/utils/** @ks129 @jb3 bot/exts/recruitment/** @wookie184 # Rules @@ -30,9 +30,9 @@ tests/bot/exts/test_cogs.py @MarkKoz tests/** @Akarys42 # CI & Docker -.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 -Dockerfile @MarkKoz @Akarys42 @Den4200 -docker-compose.yml @MarkKoz @Akarys42 @Den4200 +.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 @jb3 +Dockerfile @MarkKoz @Akarys42 @Den4200 @jb3 +docker-compose.yml @MarkKoz @Akarys42 @Den4200 @jb3 # Tools Pipfile* @Akarys42 -- cgit v1.2.3 From 58fbc2ebad75f4f86c47d23128e0c8421e68446e Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 30 Mar 2021 15:10:00 +0000 Subject: Alphabetize configuration --- bot/constants.py | 4 ++-- bot/exts/utils/utils.py | 2 +- config-default.yml | 4 ++-- 3 files changed, 5 
insertions(+), 5 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 787e8b12e..7be0b9f6d 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -487,13 +487,13 @@ class Roles(metaclass=YAMLGetter): voice_verified: int admins: int - domain_leads: int - project_leads: int core_developers: int devops: int + domain_leads: int helpers: int moderators: int owners: int + project_leads: int jammers: int team_leaders: int diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index c45f73b88..cae7f2593 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -9,7 +9,7 @@ from discord.ext.commands import BadArgument, Cog, Context, clean_content, comma from discord.utils import snowflake_time from bot.bot import Bot -from bot.constants import Channels, MODERATION_ROLES, STAFF_ROLES, Roles +from bot.constants import Channels, MODERATION_ROLES, Roles, STAFF_ROLES from bot.converters import Snowflake from bot.decorators import in_whitelist from bot.pagination import LinePaginator diff --git a/config-default.yml b/config-default.yml index dde7e48e1..59e6f4333 100644 --- a/config-default.yml +++ b/config-default.yml @@ -263,13 +263,13 @@ guild: # Staff admins: &ADMINS_ROLE 267628507062992896 - domain_leads: 807415650778742785 - project_leads: 807415650778742785 core_developers: 587606783669829632 devops: 409416496733880320 + domain_leads: 807415650778742785 helpers: &HELPERS_ROLE 267630620367257601 moderators: &MODS_ROLE 267629731250176001 owners: &OWNERS_ROLE 267627879762755584 + project_leads: 815701647526330398 # Code Jam jammers: 737249140966162473 -- cgit v1.2.3 From fb89ef5b677200596d2a71a8f73c4f9a33608203 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Tue, 30 Mar 2021 20:14:29 +0200 Subject: Increase the total number of help channels to 42 We've seen an increase in help channel activity and we're running out of help channels frequently. 
That's why we're increasing the number of help channels from 38 to 42. Note that the old configuration said 32, but we had more channels in actual rotation due to a race condition we had in the past. The system will never delete channels that were already in rotation, meaning that those that were added over the limit in the past still existed. --- config-default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config-default.yml b/config-default.yml index 502f0f861..4e7060ea4 100644 --- a/config-default.yml +++ b/config-default.yml @@ -481,7 +481,7 @@ help_channels: # Maximum number of channels across all 3 categories # Note Discord has a hard limit of 50 channels per category, so this shouldn't be > 50 - max_total_channels: 32 + max_total_channels: 42 # Prefix for help channel names name_prefix: 'help-' -- cgit v1.2.3 From 07e3b4573a43099351978543569b47dce3ba7b7c Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 30 Mar 2021 11:18:11 -0700 Subject: Update arrow to 1.0.3 It has some API changes, so it's best to update now before the project starts using the library more. 
--- Pipfile | 6 +- Pipfile.lock | 597 +++++++++++++++++++++++++++++++++-------------------------- 2 files changed, 336 insertions(+), 267 deletions(-) diff --git a/Pipfile b/Pipfile index 0a94fb888..99e480278 100644 --- a/Pipfile +++ b/Pipfile @@ -9,12 +9,14 @@ aiodns = "~=2.0" aiohttp = "~=3.7" aioping = "~=0.3.1" aioredis = "~=1.3.1" +arrow = "~=1.0.3" "async-rediscache[fakeredis]" = "~=0.1.2" beautifulsoup4 = "~=4.9" colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"} coloredlogs = "~=14.0" deepdiff = "~=4.0" "discord.py" = "~=1.6.0" +emoji = "~=0.6" feedparser = "~=5.2" fuzzywuzzy = "~=0.17" lxml = "~=4.4" @@ -26,11 +28,10 @@ requests = "~=2.22" sentry-sdk = "~=0.19" sphinx = "~=2.2" statsd = "~=3.3" -arrow = "~=0.17" -emoji = "~=0.6" [dev-packages] coverage = "~=5.0" +coveralls = "~=2.1" flake8 = "~=3.8" flake8-annotations = "~=2.0" flake8-bugbear = "~=20.1" @@ -41,7 +42,6 @@ flake8-tidy-imports = "~=4.0" flake8-todo = "~=0.7" pep8-naming = "~=0.9" pre-commit = "~=2.1" -coveralls = "~=2.1" [requires] python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock index f8cedb08f..d16cef2a8 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "228ae55fe5700ac3827ba6b661933b60b1d06f44fea8bcbe8c5a769fa10ab2fd" + "sha256": "e5b57ca7276af4709b345055d4b3705c4142c61c4669c796b79a73379ec37a9a" }, "pipfile-spec": 6, "requires": { @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:9773440a89840941ac3099a7720bf9d51e8764a484066b82ede4d395660ff430", - "sha256:a8065be3c722eb8f9fff8c0e7590729e7782202cdb9363d9830d7d5d47b45c7c" + "sha256:1d4305a5f78af3857310b4fe48348cdcf6c097e0e275ea88c2cd08570531a369", + "sha256:e69afef8695f47c5d107bbdba21bdb845d5c249acb3be53ef5c2d497b02657c0" ], "index": "pypi", - "version": "==6.7.1" + "version": "==6.8.0" }, "aiodns": { "hashes": [ @@ -34,46 +34,46 @@ }, "aiohttp": { "hashes": [ - "sha256:119feb2bd551e58d83d1b38bfa4cb921af8ddedec9fad7183132db334c3133e0", - 
"sha256:16d0683ef8a6d803207f02b899c928223eb219111bd52420ef3d7a8aa76227b6", - "sha256:2eb3efe243e0f4ecbb654b08444ae6ffab37ac0ef8f69d3a2ffb958905379daf", - "sha256:2ffea7904e70350da429568113ae422c88d2234ae776519549513c8f217f58a9", - "sha256:40bd1b101b71a18a528ffce812cc14ff77d4a2a1272dfb8b11b200967489ef3e", - "sha256:418597633b5cd9639e514b1d748f358832c08cd5d9ef0870026535bd5eaefdd0", - "sha256:481d4b96969fbfdcc3ff35eea5305d8565a8300410d3d269ccac69e7256b1329", - "sha256:4c1bdbfdd231a20eee3e56bd0ac1cd88c4ff41b64ab679ed65b75c9c74b6c5c2", - "sha256:5563ad7fde451b1986d42b9bb9140e2599ecf4f8e42241f6da0d3d624b776f40", - "sha256:58c62152c4c8731a3152e7e650b29ace18304d086cb5552d317a54ff2749d32a", - "sha256:5b50e0b9460100fe05d7472264d1975f21ac007b35dcd6fd50279b72925a27f4", - "sha256:5d84ecc73141d0a0d61ece0742bb7ff5751b0657dab8405f899d3ceb104cc7de", - "sha256:5dde6d24bacac480be03f4f864e9a67faac5032e28841b00533cd168ab39cad9", - "sha256:5e91e927003d1ed9283dee9abcb989334fc8e72cf89ebe94dc3e07e3ff0b11e9", - "sha256:62bc216eafac3204877241569209d9ba6226185aa6d561c19159f2e1cbb6abfb", - "sha256:6c8200abc9dc5f27203986100579fc19ccad7a832c07d2bc151ce4ff17190076", - "sha256:6ca56bdfaf825f4439e9e3673775e1032d8b6ea63b8953d3812c71bd6a8b81de", - "sha256:71680321a8a7176a58dfbc230789790639db78dad61a6e120b39f314f43f1907", - "sha256:7c7820099e8b3171e54e7eedc33e9450afe7cd08172632d32128bd527f8cb77d", - "sha256:7dbd087ff2f4046b9b37ba28ed73f15fd0bc9f4fdc8ef6781913da7f808d9536", - "sha256:822bd4fd21abaa7b28d65fc9871ecabaddc42767884a626317ef5b75c20e8a2d", - "sha256:8ec1a38074f68d66ccb467ed9a673a726bb397142c273f90d4ba954666e87d54", - "sha256:950b7ef08b2afdab2488ee2edaff92a03ca500a48f1e1aaa5900e73d6cf992bc", - "sha256:99c5a5bf7135607959441b7d720d96c8e5c46a1f96e9d6d4c9498be8d5f24212", - "sha256:b84ad94868e1e6a5e30d30ec419956042815dfaea1b1df1cef623e4564c374d9", - "sha256:bc3d14bf71a3fb94e5acf5bbf67331ab335467129af6416a437bd6024e4f743d", - "sha256:c2a80fd9a8d7e41b4e38ea9fe149deed0d6aaede255c497e66b8213274d6d61b", 
- "sha256:c44d3c82a933c6cbc21039326767e778eface44fca55c65719921c4b9661a3f7", - "sha256:cc31e906be1cc121ee201adbdf844522ea3349600dd0a40366611ca18cd40e81", - "sha256:d5d102e945ecca93bcd9801a7bb2fa703e37ad188a2f81b1e65e4abe4b51b00c", - "sha256:dd7936f2a6daa861143e376b3a1fb56e9b802f4980923594edd9ca5670974895", - "sha256:dee68ec462ff10c1d836c0ea2642116aba6151c6880b688e56b4c0246770f297", - "sha256:e76e78863a4eaec3aee5722d85d04dcbd9844bc6cd3bfa6aa880ff46ad16bfcb", - "sha256:eab51036cac2da8a50d7ff0ea30be47750547c9aa1aa2cf1a1b710a1827e7dbe", - "sha256:f4496d8d04da2e98cc9133e238ccebf6a13ef39a93da2e87146c8c8ac9768242", - "sha256:fbd3b5e18d34683decc00d9a360179ac1e7a320a5fee10ab8053ffd6deab76e0", - "sha256:feb24ff1226beeb056e247cf2e24bba5232519efb5645121c4aea5b6ad74c1f2" + "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe", + "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe", + "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5", + "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8", + "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd", + "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb", + "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c", + "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87", + "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0", + "sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290", + "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5", + "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287", + "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde", + "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf", + "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8", + 
"sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16", + "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf", + "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809", + "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213", + "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f", + "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013", + "sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b", + "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9", + "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5", + "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb", + "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df", + "sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4", + "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439", + "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f", + "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22", + "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f", + "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5", + "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970", + "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009", + "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc", + "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a", + "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95" ], "index": "pypi", - "version": "==3.7.4" + "version": "==3.7.4.post0" }, "aioping": { "hashes": [ @@ -96,6 +96,7 @@ "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573", "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e" ], + "markers": "python_version >= 
'3.6'", "version": "==3.3.1" }, "alabaster": { @@ -107,11 +108,11 @@ }, "arrow": { "hashes": [ - "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5", - "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4" + "sha256:3515630f11a15c61dcb4cdd245883270dd334c83f3e639824e65a4b79cc48543", + "sha256:399c9c8ae732270e1aa58ead835a79a40d7be8aa109c579898eb41029b5a231d" ], "index": "pypi", - "version": "==0.17.0" + "version": "==1.0.3" }, "async-rediscache": { "extras": [ @@ -122,6 +123,7 @@ "sha256:c25e4fff73f64d20645254783c3224a4c49e083e3fab67c44f17af944c5e26af" ], "index": "pypi", + "markers": "python_version ~= '3.7'", "version": "==0.1.4" }, "async-timeout": { @@ -129,6 +131,7 @@ "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3" ], + "markers": "python_full_version >= '3.5.3'", "version": "==3.0.1" }, "attrs": { @@ -136,6 +139,7 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "babel": { @@ -143,6 +147,7 @@ "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.9.0" }, "beautifulsoup4": { @@ -205,17 +210,17 @@ }, "chardet": { "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "version": "==3.0.4" + "markers": "python_version >= '2.7' and python_version not in 
'3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.0.0" }, "colorama": { "hashes": [ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], - "index": "pypi", "markers": "sys_platform == 'win32'", "version": "==0.4.4" }, @@ -248,6 +253,7 @@ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, "emoji": { @@ -259,10 +265,10 @@ }, "fakeredis": { "hashes": [ - "sha256:01cb47d2286825a171fb49c0e445b1fa9307087e07cbb3d027ea10dbff108b6a", - "sha256:2c6041cf0225889bc403f3949838b2c53470a95a9e2d4272422937786f5f8f73" + "sha256:1ac0cef767c37f51718874a33afb5413e69d132988cb6a80c6e6dbeddf8c7623", + "sha256:e0416e4941cecd3089b0d901e60c8dc3c944f6384f5e29e2261c0d3c5fa99669" ], - "version": "==1.4.5" + "version": "==1.5.0" }, "feedparser": { "hashes": [ @@ -283,60 +289,57 @@ }, "hiredis": { "hashes": [ - "sha256:06a039208f83744a702279b894c8cf24c14fd63c59cd917dcde168b79eef0680", - "sha256:0a909bf501459062aa1552be1461456518f367379fdc9fdb1f2ca5e4a1fdd7c0", - "sha256:18402d9e54fb278cb9a8c638df6f1550aca36a009d47ecf5aa263a38600f35b0", - "sha256:1e4cbbc3858ec7e680006e5ca590d89a5e083235988f26a004acf7244389ac01", - "sha256:23344e3c2177baf6975fbfa361ed92eb7d36d08f454636e5054b3faa7c2aff8a", - "sha256:289b31885b4996ce04cadfd5fc03d034dce8e2a8234479f7c9e23b9e245db06b", - "sha256:2c1c570ae7bf1bab304f29427e2475fe1856814312c4a1cf1cd0ee133f07a3c6", - "sha256:2c227c0ed371771ffda256034427320870e8ea2e4fd0c0a618c766e7c49aad73", - "sha256:3bb9b63d319402cead8bbd9dd55dca3b667d2997e9a0d8a1f9b6cc274db4baee", - "sha256:3ef2183de67b59930d2db8b8e8d4d58e00a50fcc5e92f4f678f6eed7a1c72d55", - "sha256:43b8ed3dbfd9171e44c554cb4acf4ee4505caa84c5e341858b50ea27dd2b6e12", - 
"sha256:47bcf3c5e6c1e87ceb86cdda2ee983fa0fe56a999e6185099b3c93a223f2fa9b", - "sha256:5263db1e2e1e8ae30500cdd75a979ff99dcc184201e6b4b820d0de74834d2323", - "sha256:5b1451727f02e7acbdf6aae4e06d75f66ee82966ff9114550381c3271a90f56c", - "sha256:6996883a8a6ff9117cbb3d6f5b0dcbbae6fb9e31e1a3e4e2f95e0214d9a1c655", - "sha256:6c96f64a54f030366657a54bb90b3093afc9c16c8e0dfa29fc0d6dbe169103a5", - "sha256:7332d5c3e35154cd234fd79573736ddcf7a0ade7a986db35b6196b9171493e75", - "sha256:7885b6f32c4a898e825bb7f56f36a02781ac4a951c63e4169f0afcf9c8c30dfb", - "sha256:7b0f63f10a166583ab744a58baad04e0f52cfea1ac27bfa1b0c21a48d1003c23", - "sha256:819f95d4eba3f9e484dd115ab7ab72845cf766b84286a00d4ecf76d33f1edca1", - "sha256:8968eeaa4d37a38f8ca1f9dbe53526b69628edc9c42229a5b2f56d98bb828c1f", - "sha256:89ebf69cb19a33d625db72d2ac589d26e936b8f7628531269accf4a3196e7872", - "sha256:8daecd778c1da45b8bd54fd41ffcd471a86beed3d8e57a43acf7a8d63bba4058", - "sha256:955ba8ea73cf3ed8bd2f963b4cb9f8f0dcb27becd2f4b3dd536fd24c45533454", - "sha256:964f18a59f5a64c0170f684c417f4fe3e695a536612e13074c4dd5d1c6d7c882", - "sha256:969843fbdfbf56cdb71da6f0bdf50f9985b8b8aeb630102945306cf10a9c6af2", - "sha256:996021ef33e0f50b97ff2d6b5f422a0fe5577de21a8873b58a779a5ddd1c3132", - "sha256:9e9c9078a7ce07e6fce366bd818be89365a35d2e4b163268f0ca9ba7e13bb2f6", - "sha256:a04901757cb0fb0f5602ac11dda48f5510f94372144d06c2563ba56c480b467c", - "sha256:a7bf1492429f18d205f3a818da3ff1f242f60aa59006e53dee00b4ef592a3363", - "sha256:aa0af2deb166a5e26e0d554b824605e660039b161e37ed4f01b8d04beec184f3", - "sha256:abfb15a6a7822f0fae681785cb38860e7a2cb1616a708d53df557b3d76c5bfd4", - "sha256:b253fe4df2afea4dfa6b1fa8c5fef212aff8bcaaeb4207e81eed05cb5e4a7919", - "sha256:b27f082f47d23cffc4cf1388b84fdc45c4ef6015f906cd7e0d988d9e35d36349", - "sha256:b33aea449e7f46738811fbc6f0b3177c6777a572207412bbbf6f525ffed001ae", - "sha256:b44f9421c4505c548435244d74037618f452844c5d3c67719d8a55e2613549da", - "sha256:bcc371151d1512201d0214c36c0c150b1dc64f19c2b1a8c9cb1d7c7c15ebd93f", 
- "sha256:c2851deeabd96d3f6283e9c6b26e0bfed4de2dc6fb15edf913e78b79fc5909ed", - "sha256:cdfd501c7ac5b198c15df800a3a34c38345f5182e5f80770caf362bccca65628", - "sha256:d2c0caffa47606d6d7c8af94ba42547bd2a441f06c74fd90a1ffe328524a6c64", - "sha256:dcb2db95e629962db5a355047fb8aefb012df6c8ae608930d391619dbd96fd86", - "sha256:e0eeb9c112fec2031927a1745788a181d0eecbacbed941fc5c4f7bc3f7b273bf", - "sha256:e154891263306200260d7f3051982774d7b9ef35af3509d5adbbe539afd2610c", - "sha256:e2e023a42dcbab8ed31f97c2bcdb980b7fbe0ada34037d87ba9d799664b58ded", - "sha256:e64be68255234bb489a574c4f2f8df7029c98c81ec4d160d6cd836e7f0679390", - "sha256:e82d6b930e02e80e5109b678c663a9ed210680ded81c1abaf54635d88d1da298" - ], - "version": "==1.1.0" + "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e", + "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27", + "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163", + "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc", + "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26", + "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e", + "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579", + "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a", + "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048", + "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87", + "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63", + "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54", + "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05", + "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb", + "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea", + "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5", + 
"sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e", + "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc", + "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99", + "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a", + "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581", + "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426", + "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db", + "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a", + "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a", + "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d", + "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443", + "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79", + "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d", + "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9", + "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d", + "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485", + "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5", + "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048", + "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0", + "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6", + "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41", + "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298", + "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce", + "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0", + "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a" + ], + "markers": "python_version >= '3.6'", + "version": "==2.0.0" }, 
"humanfriendly": { "hashes": [ "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d", "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==9.1" }, "idna": { @@ -344,6 +347,7 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "imagesize": { @@ -351,6 +355,7 @@ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "jinja2": { @@ -358,50 +363,50 @@ "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.3" }, "lxml": { "hashes": [ - "sha256:0448576c148c129594d890265b1a83b9cd76fd1f0a6a04620753d9a6bcfd0a4d", - "sha256:127f76864468d6630e1b453d3ffbbd04b024c674f55cf0a30dc2595137892d37", - "sha256:1471cee35eba321827d7d53d104e7b8c593ea3ad376aa2df89533ce8e1b24a01", - "sha256:2363c35637d2d9d6f26f60a208819e7eafc4305ce39dc1d5005eccc4593331c2", - "sha256:2e5cc908fe43fe1aa299e58046ad66981131a66aea3129aac7770c37f590a644", - "sha256:2e6fd1b8acd005bd71e6c94f30c055594bbd0aa02ef51a22bbfa961ab63b2d75", - "sha256:366cb750140f221523fa062d641393092813b81e15d0e25d9f7c6025f910ee80", - "sha256:42ebca24ba2a21065fb546f3e6bd0c58c3fe9ac298f3a320147029a4850f51a2", - "sha256:4e751e77006da34643ab782e4a5cc21ea7b755551db202bc4d3a423b307db780", - "sha256:4fb85c447e288df535b17ebdebf0ec1cf3a3f1a8eba7e79169f4f37af43c6b98", - 
"sha256:50c348995b47b5a4e330362cf39fc503b4a43b14a91c34c83b955e1805c8e308", - "sha256:535332fe9d00c3cd455bd3dd7d4bacab86e2d564bdf7606079160fa6251caacf", - "sha256:535f067002b0fd1a4e5296a8f1bf88193080ff992a195e66964ef2a6cfec5388", - "sha256:5be4a2e212bb6aa045e37f7d48e3e1e4b6fd259882ed5a00786f82e8c37ce77d", - "sha256:60a20bfc3bd234d54d49c388950195d23a5583d4108e1a1d47c9eef8d8c042b3", - "sha256:648914abafe67f11be7d93c1a546068f8eff3c5fa938e1f94509e4a5d682b2d8", - "sha256:681d75e1a38a69f1e64ab82fe4b1ed3fd758717bed735fb9aeaa124143f051af", - "sha256:68a5d77e440df94011214b7db907ec8f19e439507a70c958f750c18d88f995d2", - "sha256:69a63f83e88138ab7642d8f61418cf3180a4d8cd13995df87725cb8b893e950e", - "sha256:6e4183800f16f3679076dfa8abf2db3083919d7e30764a069fb66b2b9eff9939", - "sha256:6fd8d5903c2e53f49e99359b063df27fdf7acb89a52b6a12494208bf61345a03", - "sha256:791394449e98243839fa822a637177dd42a95f4883ad3dec2a0ce6ac99fb0a9d", - "sha256:7a7669ff50f41225ca5d6ee0a1ec8413f3a0d8aa2b109f86d540887b7ec0d72a", - "sha256:7e9eac1e526386df7c70ef253b792a0a12dd86d833b1d329e038c7a235dfceb5", - "sha256:7ee8af0b9f7de635c61cdd5b8534b76c52cd03536f29f51151b377f76e214a1a", - "sha256:8246f30ca34dc712ab07e51dc34fea883c00b7ccb0e614651e49da2c49a30711", - "sha256:8c88b599e226994ad4db29d93bc149aa1aff3dc3a4355dd5757569ba78632bdf", - "sha256:923963e989ffbceaa210ac37afc9b906acebe945d2723e9679b643513837b089", - "sha256:94d55bd03d8671686e3f012577d9caa5421a07286dd351dfef64791cf7c6c505", - "sha256:97db258793d193c7b62d4e2586c6ed98d51086e93f9a3af2b2034af01450a74b", - "sha256:a9d6bc8642e2c67db33f1247a77c53476f3a166e09067c0474facb045756087f", - "sha256:cd11c7e8d21af997ee8079037fff88f16fda188a9776eb4b81c7e4c9c0a7d7fc", - "sha256:d8d3d4713f0c28bdc6c806a278d998546e8efc3498949e3ace6e117462ac0a5e", - "sha256:e0bfe9bb028974a481410432dbe1b182e8191d5d40382e5b8ff39cdd2e5c5931", - "sha256:f4822c0660c3754f1a41a655e37cb4dbbc9be3d35b125a37fab6f82d47674ebc", - "sha256:f83d281bb2a6217cd806f4cf0ddded436790e66f393e124dfe9731f6b3fb9afe", 
- "sha256:fc37870d6716b137e80d19241d0e2cff7a7643b925dfa49b4c8ebd1295eb506e" + "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d", + "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3", + "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2", + "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f", + "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927", + "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3", + "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7", + "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f", + "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade", + "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468", + "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b", + "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4", + "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83", + "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04", + "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791", + "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51", + "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1", + "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a", + "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f", + "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee", + "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec", + "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969", + "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28", + "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a", + 
"sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa", + "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106", + "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d", + "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4", + "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0", + "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4", + "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2", + "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0", + "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654", + "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2", + "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23", + "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586" ], "index": "pypi", - "version": "==4.6.2" + "version": "==4.6.3" }, "markdownify": { "hashes": [ @@ -466,15 +471,16 @@ "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "more-itertools": { "hashes": [ - "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", - "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" + "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced", + "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713" ], "index": "pypi", - "version": "==8.6.0" + "version": "==8.7.0" }, "multidict": { "hashes": [ @@ -516,12 +522,14 @@ "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281", "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80" ], + "markers": "python_version >= '3.6'", "version": "==5.1.0" }, "ordered-set": { 
"hashes": [ "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95" ], + "markers": "python_version >= '3.5'", "version": "==4.0.2" }, "packaging": { @@ -529,6 +537,7 @@ "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.9" }, "pamqp": { @@ -577,20 +586,23 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pygments": { "hashes": [ - "sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0", - "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88" + "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", + "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" ], - "version": "==2.8.0" + "markers": "python_version >= '3.5'", + "version": "==2.8.1" }, "pyparsing": { "hashes": [ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "python-dateutil": { @@ -610,28 +622,45 @@ }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - 
"sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", 
+ "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", + "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", + "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", + "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.4.1" }, "redis": { "hashes": [ "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2", "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, "requests": { @@ -644,17 +673,18 @@ }, "sentry-sdk": { "hashes": [ - "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0", - "sha256:737a094e49a529dd0fdcaafa9e97cf7c3d5eb964bd229821d640bc77f3502b3f" + "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237", + "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b" ], "index": "pypi", - "version": "==0.19.5" + "version": "==0.20.3" }, "six": { "hashes": [ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -673,11 +703,11 @@ }, "soupsieve": { "hashes": [ - 
"sha256:407fa1e8eb3458d1b5614df51d9651a1180ea5fedf07feb46e45d7e25e6d6cdd", - "sha256:d3a5ea5b350423f47d07639f74475afedad48cf41c0ad7a82ca13a3928af34f6" + "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc", + "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b" ], "markers": "python_version >= '3.0'", - "version": "==2.2" + "version": "==2.2.1" }, "sphinx": { "hashes": [ @@ -692,6 +722,7 @@ "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-devhelp": { @@ -699,6 +730,7 @@ "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], + "markers": "python_version >= '3.5'", "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { @@ -706,6 +738,7 @@ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -713,6 +746,7 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], + "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { @@ -720,6 +754,7 @@ "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { @@ -727,6 +762,7 @@ "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], + "markers": "python_version >= '3.5'", "version": "==1.1.4" }, "statsd": { @@ -747,10 
+783,11 @@ }, "urllib3": { "hashes": [ - "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", - "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], - "version": "==1.26.3" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.4" }, "yarl": { "hashes": [ @@ -792,6 +829,7 @@ "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a", "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71" ], + "markers": "python_version >= '3.6'", "version": "==1.6.3" } }, @@ -808,6 +846,7 @@ "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, "certifi": { @@ -822,69 +861,74 @@ "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d", "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1" ], + "markers": "python_full_version >= '3.6.1'", "version": "==3.2.0" }, "chardet": { "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "version": "==3.0.4" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.0.0" }, "coverage": { "hashes": [ - "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297", - "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1", - 
"sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497", - "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606", - "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528", - "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b", - "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4", - "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830", - "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1", - "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f", - "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d", - "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3", - "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8", - "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500", - "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7", - "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb", - "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b", - "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059", - "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b", - "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72", - "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36", - "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277", - "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c", - "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631", - "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff", - "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8", - "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec", - "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b", 
- "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7", - "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105", - "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b", - "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c", - "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b", - "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98", - "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4", - "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879", - "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f", - "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4", - "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044", - "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e", - "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899", - "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f", - "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448", - "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714", - "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2", - "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d", - "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd", - "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7", - "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae" + "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c", + "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6", + "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45", + "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a", + 
"sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03", + "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529", + "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a", + "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a", + "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2", + "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6", + "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759", + "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53", + "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a", + "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4", + "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff", + "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502", + "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793", + "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb", + "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905", + "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821", + "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b", + "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81", + "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0", + "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b", + "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3", + "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184", + "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701", + "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a", + "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82", + "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638", 
+ "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5", + "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083", + "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6", + "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90", + "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465", + "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a", + "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3", + "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e", + "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066", + "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf", + "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b", + "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae", + "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669", + "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873", + "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b", + "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6", + "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb", + "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160", + "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c", + "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079", + "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d", + "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.5" }, "coveralls": { "hashes": [ @@ -916,19 +960,19 @@ }, "flake8": { "hashes": [ - "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", - "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" + 
"sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff", + "sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0" ], "index": "pypi", - "version": "==3.8.4" + "version": "==3.9.0" }, "flake8-annotations": { "hashes": [ - "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055", - "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e" + "sha256:40a4d504cdf64126ea0bdca39edab1608bc6d515e96569b7e7c3c59c84f66c36", + "sha256:eabbfb2dd59ae0e9835f509f930e79cd99fa4ff1026fe6ca073503a57407037c" ], "index": "pypi", - "version": "==2.5.0" + "version": "==2.6.1" }, "flake8-bugbear": { "hashes": [ @@ -940,11 +984,11 @@ }, "flake8-docstrings": { "hashes": [ - "sha256:3d5a31c7ec6b7367ea6506a87ec293b94a0a46c0bce2bb4975b7f1d09b6f3717", - "sha256:a256ba91bc52307bef1de59e2a009c3cf61c3d0952dbe035d6ff7208940c2edc" + "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde", + "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b" ], "index": "pypi", - "version": "==1.5.0" + "version": "==1.6.0" }, "flake8-import-order": { "hashes": [ @@ -986,16 +1030,18 @@ }, "identify": { "hashes": [ - "sha256:de7129142a5c86d75a52b96f394d94d96d497881d2aaf8eafe320cdbe8ac4bcc", - "sha256:e0dae57c0397629ce13c289f6ddde0204edf518f557bfdb1e56474aa143e77c3" + "sha256:43cb1965e84cdd247e875dec6d13332ef5be355ddc16776396d98089b9053d87", + "sha256:c7c0f590526008911ccc5ceee6ed7b085cbc92f7b6591d0ee5913a130ad64034" ], - "version": "==1.5.14" + "markers": "python_full_version >= '3.6.1'", + "version": "==2.2.2" }, "idna": { "hashes": [ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "mccabe": { @@ -1022,51 +1068,70 @@ }, "pre-commit": { "hashes": [ - 
"sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0", - "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4" + "sha256:94c82f1bf5899d56edb1d926732f4e75a7df29a0c8c092559c77420c9d62428b", + "sha256:de55c5c72ce80d79106e48beb1b54104d16495ce7f95b0c7b13d4784193a00af" ], "index": "pypi", - "version": "==2.9.3" + "version": "==2.11.1" }, "pycodestyle": { "hashes": [ - "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", - "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" + "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", + "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef" ], - "version": "==2.6.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.7.0" }, "pydocstyle": { "hashes": [ - "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325", - "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678" + "sha256:164befb520d851dbcf0e029681b91f4f599c62c5cd8933fd54b1bfbd50e89e1f", + "sha256:d4449cf16d7e6709f63192146706933c7a334af7c0f083904799ccb851c50f6d" ], - "version": "==5.1.1" + "markers": "python_version >= '3.6'", + "version": "==6.0.0" }, "pyflakes": { "hashes": [ - "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", - "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" + "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", + "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db" ], - "version": "==2.2.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.3.1" }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", 
- "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", + 
"sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", + "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", + "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", + "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.4.1" }, "requests": { "hashes": [ @@ -1081,6 +1146,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { @@ -1095,21 +1161,24 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "urllib3": { "hashes": [ - "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", - "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + 
"sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], - "version": "==1.26.3" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.4" }, "virtualenv": { "hashes": [ - "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d", - "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3" + "sha256:49ec4eb4c224c6f7dd81bb6d0a28a09ecae5894f4e593c89b0db0885f565a107", + "sha256:83f95875d382c7abafe06bd2a4cdd1b363e1bb77e02f155ebe8ac082a916b37c" ], - "version": "==20.4.2" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.4.3" } } } -- cgit v1.2.3 From b71a1e5d595d0775ffc1b4f502b9fc5efc3ca18d Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 30 Mar 2021 11:32:11 -0700 Subject: HelpChannels: use aware datetimes everywhere MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fix issues converting timestamps to datetimes and vice-versa. The main culprit id `datetime.timestamp()`, which always assumes naïve objects are in local time. That behaviour conflicts with discord.py, which returns naïve objects in UTC rather than local time. Switching from `utcfromtimestamp` to `fromtimestamp` was incorrect since the latter also assumes the timestamp is in local time. 
--- bot/exts/help_channels/_channel.py | 25 ++++++++++++++----------- bot/exts/help_channels/_cog.py | 18 +++++++++++------- bot/exts/help_channels/_message.py | 18 +++++++++--------- 3 files changed, 34 insertions(+), 27 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index b1960531d..719d341bd 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -1,8 +1,10 @@ import logging import typing as t -from datetime import datetime, timedelta +from datetime import timedelta +import arrow import discord +from arrow import Arrow import bot from bot import constants @@ -25,8 +27,8 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco yield channel -async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[datetime, str]: - """Return the timestamp at which the given help `channel` should be closed along with the reason.""" +async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[Arrow, str]: + """Return the time at which the given help `channel` should be closed along with the reason.""" log.trace(f"Getting the closing time for #{channel} ({channel.id}).") is_empty = await _message.is_empty(channel) @@ -49,23 +51,24 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T msg = await _message.get_last_message(channel) if not msg: - # last message can't be retreived, return datetime.min so channel closes right now. + # Last message can't be retrieved, return datetime.min so channel closes right now. log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages, closing now.") - return datetime.min, "deleted" + return Arrow.min, "deleted" # The time at which a channel should be closed. 
- return msg.created_at + timedelta(minutes=idle_minutes_claimant), "latest_message" + time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant) + return time, "latest_message" # Switch to datetime objects so we can use time deltas - claimant_last_message_time = datetime.fromtimestamp(claimant_last_message_time) + claimant_last_message_time = Arrow.utcfromtimestamp(claimant_last_message_time) non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) if non_claimant_last_message_time: - non_claimant_last_message_time = datetime.fromtimestamp(non_claimant_last_message_time) + non_claimant_last_message_time = Arrow.utcfromtimestamp(non_claimant_last_message_time) else: # If it's falsey, then it indicates a non-claimant has yet to reply to this session. # Set to min date time so it isn't considered when calculating the closing time. - non_claimant_last_message_time = datetime.min + non_claimant_last_message_time = Arrow.min # Get the later time at which a channel should be closed non_claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_others) @@ -92,8 +95,8 @@ async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: claimed_timestamp = await _caches.claim_times.get(channel_id) if claimed_timestamp: - claimed = datetime.fromtimestamp(claimed_timestamp) - return datetime.utcnow() - claimed + claimed = Arrow.utcfromtimestamp(claimed_timestamp) + return arrow.utcnow() - claimed def is_excluded_channel(channel: discord.abc.GuildChannel) -> bool: diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 0e71661ac..832c9cd84 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -2,9 +2,10 @@ import asyncio import logging import random import typing as t -from datetime import datetime, timedelta +from datetime import timedelta from operator import attrgetter +import arrow import discord import discord.abc from 
discord.ext import commands @@ -72,7 +73,7 @@ class HelpChannels(commands.Cog): self.channel_queue: asyncio.Queue[discord.TextChannel] = None self.name_queue: t.Deque[str] = None - self.last_notification: t.Optional[datetime] = None + self.last_notification: t.Optional[arrow.Arrow] = None # Asyncio stuff self.queue_tasks: t.List[asyncio.Task] = [] @@ -114,9 +115,12 @@ class HelpChannels(commands.Cog): self.bot.stats.incr("help.claimed") - await _caches.claim_times.set(message.channel.id, message.created_at.timestamp()) - await _caches.claimant_last_message_times.set(message.channel.id, message.created_at.timestamp()) - # Reset thie non_claimant cache for this channel to indicate that this session has yet to be answered. + # datetime.timestamp() would assume it's local, despite d.py giving a (naïve) UTC time. + timestamp = arrow.Arrow.fromdatetime(message.created_at).timestamp() + + await _caches.claim_times.set(message.channel.id, timestamp) + await _caches.claimant_last_message_times.set(message.channel.id, timestamp) + # Delete to indicate that the help session has yet to receive an answer. await _caches.non_claimant_last_message_times.delete(message.channel.id) # Not awaited because it may indefinitely hold the lock while waiting for a channel. @@ -298,7 +302,7 @@ class HelpChannels(commands.Cog): # Closing time is in the past. # Add 1 second due to POSIX timestamps being lower resolution than datetime objects. - if closing_time < (datetime.utcnow() + timedelta(seconds=1)): + if closing_time < (arrow.utcnow() + timedelta(seconds=1)): log.info( f"#{channel} ({channel.id}) is idle past {closing_time} " @@ -311,7 +315,7 @@ class HelpChannels(commands.Cog): if has_task: self.scheduler.cancel(channel.id) - delay = (closing_time - datetime.utcnow()).seconds + delay = (closing_time - arrow.utcnow()).seconds log.info( f"#{channel} ({channel.id}) is still active; " f"scheduling it to be moved after {delay} seconds." 
diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index d60b31dea..afd698ffe 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -1,9 +1,10 @@ import logging import textwrap import typing as t -from datetime import datetime +import arrow import discord +from arrow import Arrow import bot from bot import constants @@ -51,13 +52,12 @@ async def update_message_caches(message: discord.Message) -> None: log.trace(f"Checking if #{channel} ({channel.id}) has had a reply.") claimant_id = await _caches.claimants.get(channel.id) - if not claimant_id: # The mapping for this channel doesn't exist, we can't do anything. return - # Use datetime naive time stamp to be consistant with timestamps from discord. - timestamp = message.created_at.timestamp() + # datetime.timestamp() would assume it's local, despite d.py giving a (naïve) UTC time. + timestamp = Arrow.fromdatetime(message.created_at).timestamp() # Overwrite the appropriate last message cache depending on the author of the message if message.author.id == claimant_id: @@ -128,12 +128,12 @@ async def dm_on_open(message: discord.Message) -> None: ) -async def notify(channel: discord.TextChannel, last_notification: t.Optional[datetime]) -> t.Optional[datetime]: +async def notify(channel: discord.TextChannel, last_notification: t.Optional[Arrow]) -> t.Optional[Arrow]: """ Send a message in `channel` notifying about a lack of available help channels. - If a notification was sent, return the `datetime` at which the message was sent. Otherwise, - return None. + If a notification was sent, return the time at which the message was sent. + Otherwise, return None. 
Configuration: @@ -147,7 +147,7 @@ async def notify(channel: discord.TextChannel, last_notification: t.Optional[dat log.trace("Notifying about lack of channels.") if last_notification: - elapsed = (datetime.utcnow() - last_notification).seconds + elapsed = (arrow.utcnow() - last_notification).seconds minimum_interval = constants.HelpChannels.notify_minutes * 60 should_send = elapsed >= minimum_interval else: @@ -170,7 +170,7 @@ async def notify(channel: discord.TextChannel, last_notification: t.Optional[dat allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles) ) - return message.created_at + return Arrow.fromdatetime(message.created_at) except Exception: # Handle it here cause this feature isn't critical for the functionality of the system. log.exception("Failed to send notification about lack of dormant channels!") -- cgit v1.2.3 From feb9b22a40e311807eca5be58de04d0d95e85554 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 30 Mar 2021 11:47:36 -0700 Subject: HelpChannels: refactor get_closing_time --- bot/exts/help_channels/_caches.py | 5 +-- bot/exts/help_channels/_channel.py | 80 +++++++++++++++++++------------------- bot/exts/help_channels/_cog.py | 1 - 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py index 834c5f4c2..e741fd20f 100644 --- a/bot/exts/help_channels/_caches.py +++ b/bot/exts/help_channels/_caches.py @@ -12,9 +12,8 @@ claimants = RedisCache(namespace="HelpChannels.help_channel_claimants") # RedisCache[discord.TextChannel.id, UtcPosixTimestamp] claimant_last_message_times = RedisCache(namespace="HelpChannels.claimant_last_message_times") -# This cache maps a help channel to the timestamp of the last, non-claimant, -# message. This cache being empty for a given help channel indicates the -# question is unanswered. +# This cache maps a help channel to the timestamp of the last non-claimant message. 
+# This cache being empty for a given help channel indicates the question is unanswered. # RedisCache[discord.TextChannel.id, UtcPosixTimestamp] non_claimant_last_message_times = RedisCache(namespace="HelpChannels.non_claimant_last_message_times") diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 719d341bd..8af059830 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -28,65 +28,67 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[Arrow, str]: - """Return the time at which the given help `channel` should be closed along with the reason.""" + """ + Return the time at which the given help `channel` should be closed along with the reason. + + `init_done` is True if the cog has finished loading and False otherwise. + + The time is calculated as follows: + + * If `init_done` is True or the cached time for the claimant's last message is unavailable, + add the configured `idle_minutes_claimant` to the time the most recent message was sent. + * If the help session is empty (see `is_empty`), do the above but with `deleted_idle_minutes`. + * If either of the above is attempted but the channel is completely empty, close the channel + immediately. + * Otherwise, retrieve the times of the claimant's and non-claimant's last messages from the + cache. Add the configured `idle_minutes_claimant` and idle_minutes_others`, respectively, and + choose the time which is furthest in the future. 
+ """ log.trace(f"Getting the closing time for #{channel} ({channel.id}).") is_empty = await _message.is_empty(channel) - if is_empty: idle_minutes_claimant = constants.HelpChannels.deleted_idle_minutes else: idle_minutes_claimant = constants.HelpChannels.idle_minutes_claimant - claimant_last_message_time = await _caches.claimant_last_message_times.get(channel.id) - - if ( - is_empty - or not init_done - or claimant_last_message_time is None - ): - # If the current help channel has no messages, the help system cog is starting or - # the claimant cache is empty, use the last message in the channel to determine closing time instead. + claimant_time = await _caches.claimant_last_message_times.get(channel.id) + # The current session lacks messages, the cog is still starting, or the cache is empty. + if is_empty or not init_done or claimant_time is None: msg = await _message.get_last_message(channel) - if not msg: - # Last message can't be retrieved, return datetime.min so channel closes right now. log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages, closing now.") return Arrow.min, "deleted" - # The time at which a channel should be closed. + # Use the greatest offset to avoid the possibility of prematurely closing the channel. 
time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant) return time, "latest_message" - # Switch to datetime objects so we can use time deltas - claimant_last_message_time = Arrow.utcfromtimestamp(claimant_last_message_time) - non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel.id) + claimant_time = Arrow.utcfromtimestamp(claimant_time) + others_time = await _caches.non_claimant_last_message_times.get(channel.id) - if non_claimant_last_message_time: - non_claimant_last_message_time = Arrow.utcfromtimestamp(non_claimant_last_message_time) + if others_time: + others_time = Arrow.utcfromtimestamp(others_time) else: - # If it's falsey, then it indicates a non-claimant has yet to reply to this session. - # Set to min date time so it isn't considered when calculating the closing time. - non_claimant_last_message_time = Arrow.min - - # Get the later time at which a channel should be closed - non_claimant_last_message_time += timedelta(minutes=constants.HelpChannels.idle_minutes_others) - claimant_last_message_time += timedelta(minutes=idle_minutes_claimant) - - # The further away closing time is what we should use. - if claimant_last_message_time >= non_claimant_last_message_time: - log.trace( - f"#{channel} ({channel.id}) should be closed at " - f"{claimant_last_message_time} due to claimant timeout." - ) - return claimant_last_message_time, "claimant_timeout" + # The help session hasn't received any answers (messages from non-claimants) yet. + # Set to min value so it isn't considered when calculating the closing time. + others_time = Arrow.min + + # Offset the cached times by the configured values. + others_time += timedelta(minutes=constants.HelpChannels.idle_minutes_others) + claimant_time += timedelta(minutes=idle_minutes_claimant) + + # Use the time which is the furthest into the future. 
+ if claimant_time >= others_time: + closing_time = claimant_time + reason = "claimant_timeout" else: - log.trace( - f"#{channel} ({channel.id}) should be closed at " - f"{non_claimant_last_message_time} due to others timeout." - ) - return non_claimant_last_message_time, "others_timeout" + closing_time = others_time + reason = "others_timeout" + + log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time} due to {reason}.") + return closing_time, reason async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 832c9cd84..183ee8a9b 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -303,7 +303,6 @@ class HelpChannels(commands.Cog): # Closing time is in the past. # Add 1 second due to POSIX timestamps being lower resolution than datetime objects. if closing_time < (arrow.utcnow() + timedelta(seconds=1)): - log.info( f"#{channel} ({channel.id}) is idle past {closing_time} " f"and will be made dormant. Reason: {closed_on}" -- cgit v1.2.3 From 96037aa80717420d3a3b077b38e8cff571488ba8 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 30 Mar 2021 20:48:37 +0100 Subject: Enumerate all possible values for closed_on in docstring --- bot/exts/help_channels/_cog.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 183ee8a9b..658e50201 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -366,7 +366,10 @@ class HelpChannels(commands.Cog): Remove the cooldown role from the channel claimant if they have no other channels claimed. Cancel the scheduled cooldown role removal task. - `closed_on` is the reason that the channel was closed for. Examples: "cleanup", "command", "claimant_timeout" + `closed_on` is the reason that the channel was closed for. 
Possible values for this are: + "cleanup", "command", "claimant_timeout", "others_timeout", "deleted". + All values, except for "command", get prefixed with "auto." within `_stats.report_complete_session()` + before being added to the bot's stats. """ claimant_id = await _caches.claimants.get(channel.id) _unclaim_channel = self._unclaim_channel -- cgit v1.2.3 From b030711b69a50c2b2c068865b1987d61f8267f1c Mon Sep 17 00:00:00 2001 From: Den4200 Date: Tue, 30 Mar 2021 15:58:05 -0400 Subject: Remove `dm_log` from channels and webhooks in the config. --- bot/constants.py | 2 -- config-default.yml | 2 -- 2 files changed, 4 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 467a4a2c4..4040d757e 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -414,7 +414,6 @@ class Channels(metaclass=YAMLGetter): cooldown: int attachment_log: int - dm_log: int message_log: int mod_log: int user_log: int @@ -466,7 +465,6 @@ class Webhooks(metaclass=YAMLGetter): big_brother: int dev_log: int - dm_log: int duck_pond: int incidents_archive: int reddit: int diff --git a/config-default.yml b/config-default.yml index 4e7060ea4..39b33ca10 100644 --- a/config-default.yml +++ b/config-default.yml @@ -169,7 +169,6 @@ guild: # Logs attachment_log: &ATTACH_LOG 649243850006855680 - dm_log: 653713721625018428 message_log: &MESSAGE_LOG 467752170159079424 mod_log: &MOD_LOG 282638479504965634 user_log: 528976905546760203 @@ -287,7 +286,6 @@ guild: webhooks: big_brother: 569133704568373283 dev_log: 680501655111729222 - dm_log: 654567640664244225 duck_pond: 637821475327311927 incidents_archive: 720671599790915702 python_news: &PYNEWS_WEBHOOK 704381182279942324 -- cgit v1.2.3 From d77af18516a030dd039014e2fcf5bb56e09e8358 Mon Sep 17 00:00:00 2001 From: Den4200 Date: Tue, 30 Mar 2021 16:00:21 -0400 Subject: Replace usage of `textwrap.dedent` with a cleaner method. 
--- bot/exts/moderation/dm_relay.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index a03230b3d..68a48c67c 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -1,5 +1,4 @@ import logging -import textwrap import discord from discord.ext.commands import Cog, Context, command, has_any_role @@ -50,10 +49,10 @@ class DMRelay(Cog): await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.") return - metadata = textwrap.dedent(f"""\ - User: {user} ({user.id}) - Channel ID: {user.dm_channel.id}\n - """) + metadata = ( + f"User: {user} ({user.id})\n" + f"Channel ID: {user.dm_channel.id}\n\n" + ) paste_link = await send_to_paste_service(metadata + output, extension="txt") await ctx.send(paste_link) -- cgit v1.2.3 From 2334c95d8781df829ac2ec1a1c5abb2b4d776586 Mon Sep 17 00:00:00 2001 From: Den4200 Date: Tue, 30 Mar 2021 16:06:23 -0400 Subject: Gracefully handle failure to upload to hastebin in `!dmrelay`. 
--- bot/exts/moderation/dm_relay.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index 68a48c67c..1d2206e27 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -55,6 +55,11 @@ class DMRelay(Cog): ) paste_link = await send_to_paste_service(metadata + output, extension="txt") + + if paste_link is None: + await ctx.send(f"{Emojis.cross_mark} Failed to upload output to hastebin.") + return + await ctx.send(paste_link) async def cog_check(self, ctx: Context) -> bool: -- cgit v1.2.3 From 2621f2488409ae4a1cc4b036249861e1bd10b1c7 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 30 Mar 2021 21:20:01 +0100 Subject: Change to an Enum for possible closing reasons --- bot/exts/help_channels/_channel.py | 22 +++++++++++++++++----- bot/exts/help_channels/_cog.py | 22 ++++++++++++---------- bot/exts/help_channels/_stats.py | 8 +++----- 3 files changed, 32 insertions(+), 20 deletions(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 8af059830..b8db337fc 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -1,6 +1,7 @@ import logging import typing as t from datetime import timedelta +from enum import Enum import arrow import discord @@ -17,6 +18,17 @@ MAX_CHANNELS_PER_CATEGORY = 50 EXCLUDED_CHANNELS = (constants.Channels.cooldown,) +class ClosingReason(Enum): + """All possible closing reasons for help channels.""" + + COMMAND = "command" + LATEST_MESSSAGE = "auto.latest_message" + CLAIMANT_TIMEOUT = "auto.claimant_timeout" + OTHER_TIMEOUT = "auto.other_timeout" + DELETED = "auto.deleted" + CLEANUP = "auto.deleted" + + def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[discord.TextChannel]: """Yield the text channels of the `category` in an unsorted manner.""" log.trace(f"Getting text channels in the category '{category}' ({category.id}).") @@ -27,7 +39,7 @@ def 
get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco yield channel -async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[Arrow, str]: +async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[Arrow, ClosingReason]: """ Return the time at which the given help `channel` should be closed along with the reason. @@ -59,11 +71,11 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T msg = await _message.get_last_message(channel) if not msg: log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages, closing now.") - return Arrow.min, "deleted" + return Arrow.min, ClosingReason.DELETED # Use the greatest offset to avoid the possibility of prematurely closing the channel. time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant) - return time, "latest_message" + return time, ClosingReason.LATEST_MESSSAGE claimant_time = Arrow.utcfromtimestamp(claimant_time) others_time = await _caches.non_claimant_last_message_times.get(channel.id) @@ -82,10 +94,10 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T # Use the time which is the furthest into the future. if claimant_time >= others_time: closing_time = claimant_time - reason = "claimant_timeout" + reason = ClosingReason.CLAIMANT_TIMEOUT else: closing_time = others_time - reason = "others_timeout" + reason = ClosingReason.OTHER_TIMEOUT log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time} due to {reason}.") return closing_time, reason diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 658e50201..18457f6a5 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -192,7 +192,7 @@ class HelpChannels(commands.Cog): # Don't use a discord.py check because the check needs to fail silently. 
if await self.close_check(ctx): log.info(f"Close command invoked by {ctx.author} in #{ctx.channel}.") - await self.unclaim_channel(ctx.channel, closed_on="command") + await self.unclaim_channel(ctx.channel, closed_on=_channel.ClosingReason.COMMAND) async def get_available_candidate(self) -> discord.TextChannel: """ @@ -238,7 +238,7 @@ class HelpChannels(commands.Cog): elif missing < 0: log.trace(f"Moving {abs(missing)} superfluous available channels over to the Dormant category.") for channel in channels[:abs(missing)]: - await self.unclaim_channel(channel, closed_on="cleanup") + await self.unclaim_channel(channel, closed_on=_channel.ClosingReason.CLEANUP) async def init_categories(self) -> None: """Get the help category objects. Remove the cog if retrieval fails.""" @@ -305,7 +305,7 @@ class HelpChannels(commands.Cog): if closing_time < (arrow.utcnow() + timedelta(seconds=1)): log.info( f"#{channel} ({channel.id}) is idle past {closing_time} " - f"and will be made dormant. Reason: {closed_on}" + f"and will be made dormant. Reason: {closed_on.value}" ) await self.unclaim_channel(channel, closed_on=closed_on) @@ -358,7 +358,7 @@ class HelpChannels(commands.Cog): _stats.report_counts() @lock.lock_arg(f"{NAMESPACE}.unclaim", "channel") - async def unclaim_channel(self, channel: discord.TextChannel, *, closed_on: str) -> None: + async def unclaim_channel(self, channel: discord.TextChannel, *, closed_on: _channel.ClosingReason) -> None: """ Unclaim an in-use help `channel` to make it dormant. @@ -366,10 +366,7 @@ class HelpChannels(commands.Cog): Remove the cooldown role from the channel claimant if they have no other channels claimed. Cancel the scheduled cooldown role removal task. - `closed_on` is the reason that the channel was closed for. Possible values for this are: - "cleanup", "command", "claimant_timeout", "others_timeout", "deleted". - All values, except for "command", get prefixed with "auto." 
within `_stats.report_complete_session()` - before being added to the bot's stats. + `closed_on` is the reason that the channel was closed. See _channel.ClosingReason for possible values. """ claimant_id = await _caches.claimants.get(channel.id) _unclaim_channel = self._unclaim_channel @@ -382,7 +379,12 @@ class HelpChannels(commands.Cog): return await _unclaim_channel(channel, claimant_id, closed_on) - async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, closed_on: str) -> None: + async def _unclaim_channel( + self, + channel: discord.TextChannel, + claimant_id: int, + closed_on: _channel.ClosingReason + ) -> None: """Actual implementation of `unclaim_channel`. See that for full documentation.""" await _caches.claimants.delete(channel.id) @@ -403,7 +405,7 @@ class HelpChannels(commands.Cog): # Cancel the task that makes the channel dormant only if called by the close command. # In other cases, the task is either already done or not-existent. - if closed_on == "command": + if closed_on == _channel.ClosingReason.COMMAND: self.scheduler.cancel(channel.id) async def move_to_in_use(self, channel: discord.TextChannel) -> None: diff --git a/bot/exts/help_channels/_stats.py b/bot/exts/help_channels/_stats.py index 123604945..eb34e75e1 100644 --- a/bot/exts/help_channels/_stats.py +++ b/bot/exts/help_channels/_stats.py @@ -22,15 +22,13 @@ def report_counts() -> None: log.warning(f"Couldn't find category {name!r} to track channel count stats.") -async def report_complete_session(channel_id: int, closed_on: str) -> None: +async def report_complete_session(channel_id: int, closed_on: _channel.ClosingReason) -> None: """ Report stats for a completed help session channel `channel_id`. - Set `is_auto` to True if the channel was automatically closed or False if manually closed. + `closed_on` is the reason why the channel was closed. See `_channel.ClosingReason` for possible reasons. 
""" - if closed_on != "command": - closed_on = f"auto.{closed_on}" - bot.instance.stats.incr(f"help.dormant_calls.{closed_on}") + bot.instance.stats.incr(f"help.dormant_calls.{closed_on.value}") in_use_time = await _channel.get_in_use_time(channel_id) if in_use_time: -- cgit v1.2.3 From 0b4e48883949213260733c3263b4067531f785ea Mon Sep 17 00:00:00 2001 From: Den4200 Date: Tue, 30 Mar 2021 16:21:16 -0400 Subject: Removed user event announcements from the config. --- bot/constants.py | 1 - config-default.yml | 3 --- 2 files changed, 4 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 4040d757e..91b60b9a6 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -402,7 +402,6 @@ class Channels(metaclass=YAMLGetter): python_events: int python_news: int reddit: int - user_event_announcements: int dev_contrib: int dev_core: int diff --git a/config-default.yml b/config-default.yml index 39b33ca10..e3fd98ac0 100644 --- a/config-default.yml +++ b/config-default.yml @@ -150,7 +150,6 @@ guild: python_events: &PYEVENTS_CHANNEL 729674110270963822 python_news: &PYNEWS_CHANNEL 704372456592506880 reddit: &REDDIT_CHANNEL 458224812528238616 - user_event_announcements: &USER_EVENT_A 592000283102674944 # Development dev_contrib: &DEV_CONTRIB 635950537262759947 @@ -322,7 +321,6 @@ filter: - *MOD_LOG - *STAFF_LOUNGE - *TALENT_POOL - - *USER_EVENT_A role_whitelist: - *ADMINS_ROLE @@ -511,7 +509,6 @@ duck_pond: - *PYEVENTS_CHANNEL - *MAILING_LISTS - *REDDIT_CHANNEL - - *USER_EVENT_A - *DUCK_POND - *CHANGE_LOG - *STAFF_ANNOUNCEMENTS -- cgit v1.2.3 From 57fb66d5b01dabee9d57efdeee419263b7ecd983 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 30 Mar 2021 21:27:44 +0100 Subject: Fix copy & paste error in closing reason enum --- bot/exts/help_channels/_channel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index b8db337fc..2837bc7c5 100644 --- a/bot/exts/help_channels/_channel.py +++ 
b/bot/exts/help_channels/_channel.py @@ -26,7 +26,7 @@ class ClosingReason(Enum): CLAIMANT_TIMEOUT = "auto.claimant_timeout" OTHER_TIMEOUT = "auto.other_timeout" DELETED = "auto.deleted" - CLEANUP = "auto.deleted" + CLEANUP = "auto.cleanup" def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[discord.TextChannel]: -- cgit v1.2.3 From 74eed3e9d39a203b6abfa2ea45ef00286f3639b1 Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 30 Mar 2021 22:26:37 +0100 Subject: Use deleted reason if help channel is closed due to being empty --- bot/exts/help_channels/_channel.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 2837bc7c5..0846b28c8 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -75,7 +75,8 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T # Use the greatest offset to avoid the possibility of prematurely closing the channel. time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant) - return time, ClosingReason.LATEST_MESSSAGE + reason = ClosingReason.DELETED if is_empty else ClosingReason.LATEST_MESSSAGE + return time, reason claimant_time = Arrow.utcfromtimestamp(claimant_time) others_time = await _caches.non_claimant_last_message_times.get(channel.id) -- cgit v1.2.3 From a14dd82dd672b123f4cf00b6324be5bb79528cdd Mon Sep 17 00:00:00 2001 From: kwzrd Date: Wed, 31 Mar 2021 19:59:18 +0200 Subject: Branding: target 'main' branch With the branding-side PR merged, we can now target the production branch. 
--- bot/exts/backend/branding/_repository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 3a9745ed5..e6c2396b1 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -9,9 +9,9 @@ from bot.constants import Keys from bot.errors import BrandingMisconfiguration # Base URL for requests into the branding repository. -BRANDING_URL = "https://api.github.com/repos/kwzrd/pydis-branding/contents" +BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents" -PARAMS = {"ref": "kwzrd/events-rework"} # Target branch. +PARAMS = {"ref": "main"} # Target branch. HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3. # A GitHub token is not necessary. However, unauthorized requests are limited to 60 per hour. -- cgit v1.2.3 From 220590c7bb6593b06d1796f0807568e06fefa99e Mon Sep 17 00:00:00 2001 From: kwzrd Date: Wed, 31 Mar 2021 20:17:25 +0200 Subject: Branding: apply documentation improvements after review No code changes in this commit. Co-authored-by: Shivansh-007 Co-authored-by: Joe Banks --- bot/exts/backend/branding/_cog.py | 14 +++++++------- bot/exts/backend/branding/_repository.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index b07edbffd..0a4ddcc88 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -144,7 +144,7 @@ class Branding(commands.Cog): timeout = 10 # Seconds. try: - with async_timeout.timeout(timeout): + with async_timeout.timeout(timeout): # Raise after `timeout` seconds. await pydis.edit(**{asset_type.value: file}) except discord.HTTPException: log.exception("Asset upload to Discord failed.") @@ -160,7 +160,7 @@ class Branding(commands.Cog): """ Apply `banner` to the guild and cache its hash if successful. 
- Banners should always be applied via this method in order to ensure that the last hash is cached. + Banners should always be applied via this method to ensure that the last hash is cached. Return a boolean indicating whether the application was successful. """ @@ -217,9 +217,9 @@ class Branding(commands.Cog): """ Call `rotate_icons` if the configured amount of time has passed since last rotation. - We offset the calculated time difference into the future in order to avoid off-by-a-little-bit errors. - Because there is work to be done before the timestamp is read and written, the next read will likely - commence slightly under 24 hours after the last write. + We offset the calculated time difference into the future to avoid off-by-a-little-bit errors. Because there + is work to be done before the timestamp is read and written, the next read will likely commence slightly + under 24 hours after the last write. """ log.debug("Checking whether it's time for icons to rotate.") @@ -298,7 +298,7 @@ class Branding(commands.Cog): We cache `event` information to ensure that we: * Remember which event we're currently in across restarts - * Provide an on-demand information embed without re-querying the branding repository + * Provide an on-demand informational embed without re-querying the branding repository An event change should always be handled via this function, as it ensures that the cache is populated. @@ -487,7 +487,7 @@ class Branding(commands.Cog): log.trace("Daemon before: calculating time to sleep before loop begins.") now = datetime.utcnow() - # The actual midnight moment is offset into the future in order to prevent issues with imprecise sleep. + # The actual midnight moment is offset into the future to prevent issues with imprecise sleep. 
tomorrow = now + timedelta(days=1) midnight = datetime.combine(tomorrow, time(minute=1)) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index e6c2396b1..740a4a083 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -19,7 +19,7 @@ if Keys.github: HEADERS["Authorization"] = f"token {Keys.github}" # Since event periods are year-agnostic, we parse them into `datetime` objects with a manually inserted year. -# Please note that this is intentionally a leap year in order to allow Feb 29 to be valid. +# Please note that this is intentionally a leap year to allow Feb 29 to be valid. ARBITRARY_YEAR = 2020 # Format used to parse date strings after we inject `ARBITRARY_YEAR` at the end. -- cgit v1.2.3 From b778c25427108f4ffb20328c5977618e3f97c523 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Wed, 31 Mar 2021 20:19:28 +0200 Subject: Branding: log after successful fetch Co-authored-by: Shivansh-007 Co-authored-by: Joe Banks --- bot/exts/backend/branding/_repository.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 740a4a083..7b09d4641 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -107,6 +107,8 @@ class BrandingRepository: async with self.bot.http_session.get(full_url, params=PARAMS, headers=HEADERS) as response: if response.status != 200: raise RuntimeError(f"Failed to fetch directory due to status: {response.status}") + + log.debug("Fetch successful, reading JSON response.") json_directory = await response.json() return {file["name"]: RemoteObject(file) for file in json_directory if file["type"] in types} @@ -122,6 +124,8 @@ class BrandingRepository: async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response: if response.status != 200: raise RuntimeError(f"Failed to fetch file due to status: 
{response.status}") + + log.debug("Fetch successful, reading payload.") return await response.read() def parse_meta_file(self, raw_file: bytes) -> MetaFile: -- cgit v1.2.3 From 2711a8456c3a1b30b0f14d184fcfeba122b07490 Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 31 Mar 2021 23:20:42 +0100 Subject: Simplify name of function param in stream cog --- bot/exts/moderation/stream.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index c61599278..a9d0199f4 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -23,10 +23,10 @@ class Stream(commands.Cog): self.scheduler = Scheduler(self.__class__.__name__) self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis()) - async def _remove_streaming_permission(self, schedule_user: discord.Member) -> None: + async def _remove_streaming_permission(self, member: discord.Member) -> None: """Remove streaming permission from Member.""" - await self._delete_from_redis(schedule_user.id) - await schedule_user.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") + await self._delete_from_redis(member.id) + await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") async def _add_to_redis_cache(self, user_id: int, timestamp: float) -> None: """Adds 'task' to redis cache.""" -- cgit v1.2.3 From bb4f4fb3fec316efa8c3775f23115e1a153347eb Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 31 Mar 2021 23:22:11 +0100 Subject: Remove unnessisary functions in stream cog --- bot/exts/moderation/stream.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index a9d0199f4..16e62b5dc 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -28,10 +28,6 @@ class Stream(commands.Cog): await self._delete_from_redis(member.id) await 
member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") - async def _add_to_redis_cache(self, user_id: int, timestamp: float) -> None: - """Adds 'task' to redis cache.""" - await self.task_cache.set(user_id, timestamp) - async def _reload_tasks_from_redis(self) -> None: await self.bot.wait_until_guild_available() items = await self.task_cache.items() @@ -81,7 +77,7 @@ class Stream(commands.Cog): # Schedule task to remove streaming permission from Member and add it to task cache self.scheduler.schedule_at(duration, user.id, self._remove_streaming_permission(user)) - await self._add_to_redis_cache(user.id, duration.timestamp()) + await self.task_cache.set(user.id, duration.timestamp()) await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") duration = format_infraction_with_duration(str(duration)) await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream until {duration}.") @@ -100,7 +96,7 @@ class Stream(commands.Cog): if already_allowed: if user.id in self.scheduler: self.scheduler.cancel(user.id) - await self._delete_from_redis(user.id) + await self.task_cache.delete(user.id) await ctx.send(f"{Emojis.check_mark} Moved temporary permission to permanent") return await ctx.send(f"{Emojis.cross_mark} This user can already stream.") -- cgit v1.2.3 From 92306df643cd51c837a60d346a59e3d52f7d60b3 Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 31 Mar 2021 23:24:31 +0100 Subject: Refactor code to match prevailing style, and reword some output in streaming cog --- bot/exts/moderation/stream.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 16e62b5dc..25e0f2c1b 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -33,9 +33,11 @@ class Stream(commands.Cog): items = await self.task_cache.items() for key, value in items: member = await 
self.bot.get_guild(Guild.id).fetch_member(key) - self.scheduler.schedule_at(datetime.datetime.utcfromtimestamp(value), - key, - self._remove_streaming_permission(member)) + self.scheduler.schedule_at( + datetime.datetime.utcfromtimestamp(value), + key, + self._remove_streaming_permission(member) + ) async def _delete_from_redis(self, key: str) -> None: await self.task_cache.delete(key) @@ -90,20 +92,20 @@ class Stream(commands.Cog): user: discord.Member, *_ ) -> None: - """Permanently give user a streaming permission.""" + """Permanently grant a user the permission to stream.""" # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: if user.id in self.scheduler: self.scheduler.cancel(user.id) await self.task_cache.delete(user.id) - await ctx.send(f"{Emojis.check_mark} Moved temporary permission to permanent") + await ctx.send(f"{Emojis.check_mark} Changed temporary permission to permanent.") return await ctx.send(f"{Emojis.cross_mark} This user can already stream.") return await user.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") - await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream forever") + await ctx.send(f"{Emojis.check_mark} Permanently granted {user.mention} the permission to stream.") @commands.command(aliases=("unstream", )) @commands.has_any_role(*STAFF_ROLES) @@ -112,7 +114,7 @@ class Stream(commands.Cog): ctx: commands.Context, user: discord.Member ) -> None: - """Take away streaming permission from a user.""" + """Revoke the permissiont to stream from a user.""" # Check if user has the streaming permission to begin with allowed = any(Roles.video == role.id for role in user.roles) if allowed: @@ -120,7 +122,7 @@ class Stream(commands.Cog): if user.id in self.scheduler: self.scheduler.cancel(user.id) await self._remove_streaming_permission(user) - await ctx.send(f"{Emojis.check_mark} Streaming permission taken from 
{user.display_name}.") + await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {user.mention}.") else: await ctx.send(f"{Emojis.cross_mark} This user already can't stream.") -- cgit v1.2.3 From d34b95771f551a7dc1e5b3d422df39f95ff800a0 Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 31 Mar 2021 23:54:41 +0100 Subject: Atempt to get member from cache first, and handle errors from API member call --- bot/exts/moderation/stream.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 25e0f2c1b..66ca96339 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,4 +1,5 @@ import datetime +import logging import discord from async_rediscache import RedisCache @@ -10,6 +11,8 @@ from bot.converters import Expiry from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration +log = logging.getLogger(__name__) + class Stream(commands.Cog): """Grant and revoke streaming permissions from users.""" @@ -32,7 +35,22 @@ class Stream(commands.Cog): await self.bot.wait_until_guild_available() items = await self.task_cache.items() for key, value in items: - member = await self.bot.get_guild(Guild.id).fetch_member(key) + member = self.bot.get_guild(Guild.id).get_member(key) + + if not member: + try: + member = await self.bot.get_guild(Guild.id).fetch_member(key) + except discord.errors.NotFound: + log.debug( + f"Member {key} left the guild before we could scheudle " + "the revoking of their streaming permissions." 
+ ) + await self.task_cache.delete(key) + continue + except discord.HTTPException as e: + log.exception(f"Exception while trying to retrieve member {key} from discord\n{e}") + continue + self.scheduler.schedule_at( datetime.datetime.utcfromtimestamp(value), key, -- cgit v1.2.3 From 4dc697bfdd2ff064bee7e2a48e61c7136ed30c5c Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 31 Mar 2021 23:56:34 +0100 Subject: Remove empty test for stream cog --- tests/bot/exts/moderation/test_stream.py | 20 -------------------- 1 file changed, 20 deletions(-) delete mode 100644 tests/bot/exts/moderation/test_stream.py diff --git a/tests/bot/exts/moderation/test_stream.py b/tests/bot/exts/moderation/test_stream.py deleted file mode 100644 index 2ac274699..000000000 --- a/tests/bot/exts/moderation/test_stream.py +++ /dev/null @@ -1,20 +0,0 @@ -import unittest - - -from bot.constants import Roles -from tests.helpers import MockMember, MockRole - - -class StreamCommandTest(unittest.IsolatedAsyncioTestCase): - - def test_checking_if_user_has_streaming_permission(self): - """ - Test searching for video role in Member.roles - """ - user1 = MockMember(roles=[MockRole(id=Roles.video)]) - user2 = MockMember() - already_allowed_user1 = any(Roles.video == role.id for role in user1.roles) - self.assertEqual(already_allowed_user1, True) - - already_allowed_user2 = any(Roles.video == role.id for role in user2.roles) - self.assertEqual(already_allowed_user2, False) -- cgit v1.2.3 From 777e269290d84fae3d7a44f6b98b91ce8a478004 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 00:02:57 +0100 Subject: function sig formatting, function ordering and remove unnessisary function --- bot/exts/moderation/stream.py | 38 ++++++++++---------------------------- 1 file changed, 10 insertions(+), 28 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 66ca96339..c598ec879 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -26,9 +26,14 @@ 
class Stream(commands.Cog): self.scheduler = Scheduler(self.__class__.__name__) self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis()) + def cog_unload(self) -> None: + """Cancel all scheduled tasks.""" + self.reload_task.cancel() + self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all()) + async def _remove_streaming_permission(self, member: discord.Member) -> None: """Remove streaming permission from Member.""" - await self._delete_from_redis(member.id) + await self.task_cache.delete(member.id) await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") async def _reload_tasks_from_redis(self) -> None: @@ -57,18 +62,9 @@ class Stream(commands.Cog): self._remove_streaming_permission(member) ) - async def _delete_from_redis(self, key: str) -> None: - await self.task_cache.delete(key) - @commands.command(aliases=("streaming",)) @commands.has_any_role(*STAFF_ROLES) - async def stream( - self, - ctx: commands.Context, - user: discord.Member, - duration: Expiry = None, - *_ - ) -> None: + async def stream(self, ctx: commands.Context, user: discord.Member, duration: Expiry = None) -> None: """ Temporarily grant streaming permissions to a user for a given duration. 
@@ -104,12 +100,7 @@ class Stream(commands.Cog): @commands.command(aliases=("pstream",)) @commands.has_any_role(*STAFF_ROLES) - async def permanentstream( - self, - ctx: commands.Context, - user: discord.Member, - *_ - ) -> None: + async def permanentstream(self, ctx: commands.Context, user: discord.Member) -> None: """Permanently grant a user the permission to stream.""" # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) @@ -125,13 +116,9 @@ class Stream(commands.Cog): await user.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") await ctx.send(f"{Emojis.check_mark} Permanently granted {user.mention} the permission to stream.") - @commands.command(aliases=("unstream", )) + @commands.command(aliases=("unstream", "rstream")) @commands.has_any_role(*STAFF_ROLES) - async def revokestream( - self, - ctx: commands.Context, - user: discord.Member - ) -> None: + async def revokestream(self, ctx: commands.Context, user: discord.Member) -> None: """Revoke the permissiont to stream from a user.""" # Check if user has the streaming permission to begin with allowed = any(Roles.video == role.id for role in user.roles) @@ -144,11 +131,6 @@ class Stream(commands.Cog): else: await ctx.send(f"{Emojis.cross_mark} This user already can't stream.") - def cog_unload(self) -> None: - """Cancel all scheduled tasks.""" - self.reload_task.cancel() - self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all()) - def setup(bot: Bot) -> None: """Loads the Stream cog.""" -- cgit v1.2.3 From e840ff6b55a4d0b4aeed2c9011915e9c44d3275f Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 00:05:20 +0100 Subject: Stream cog docstring updates --- bot/exts/moderation/stream.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index c598ec879..55763d6fd 100644 --- a/bot/exts/moderation/stream.py +++ 
b/bot/exts/moderation/stream.py @@ -32,11 +32,12 @@ class Stream(commands.Cog): self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all()) async def _remove_streaming_permission(self, member: discord.Member) -> None: - """Remove streaming permission from Member.""" + """Remove the streaming permission from the given Member.""" await self.task_cache.delete(member.id) await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") async def _reload_tasks_from_redis(self) -> None: + """Reload outstanding tasks from redis on startup, delete the task if the member has since left the server.""" await self.bot.wait_until_guild_available() items = await self.task_cache.items() for key, value in items: @@ -101,7 +102,7 @@ class Stream(commands.Cog): @commands.command(aliases=("pstream",)) @commands.has_any_role(*STAFF_ROLES) async def permanentstream(self, ctx: commands.Context, user: discord.Member) -> None: - """Permanently grant a user the permission to stream.""" + """Permanently grants the given user the permission to stream.""" # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: @@ -119,7 +120,7 @@ class Stream(commands.Cog): @commands.command(aliases=("unstream", "rstream")) @commands.has_any_role(*STAFF_ROLES) async def revokestream(self, ctx: commands.Context, user: discord.Member) -> None: - """Revoke the permissiont to stream from a user.""" + """Revoke the permission to stream from the given user.""" # Check if user has the streaming permission to begin with allowed = any(Roles.video == role.id for role in user.roles) if allowed: -- cgit v1.2.3 From da9ddfa56d8f89136d8e8ca1e61cc252e9ab0f21 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 00:17:58 +0100 Subject: Sprinkle some logging into the Stream cog --- bot/exts/moderation/stream.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git 
a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 55763d6fd..87d3a345c 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -56,9 +56,10 @@ class Stream(commands.Cog): except discord.HTTPException as e: log.exception(f"Exception while trying to retrieve member {key} from discord\n{e}") continue - + revoke_time = datetime.datetime.utcfromtimestamp(value) + log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}") self.scheduler.schedule_at( - datetime.datetime.utcfromtimestamp(value), + revoke_time, key, self._remove_streaming_permission(member) ) @@ -81,6 +82,7 @@ class Stream(commands.Cog): Alternatively, an ISO 8601 timestamp can be provided for the duration. """ + log.trace(f"Attempting to give temporary streaming permission to {user} ({user.id}).") # if duration is none then calculate default duration if duration is None: now = datetime.datetime.utcnow() @@ -90,19 +92,22 @@ class Stream(commands.Cog): already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: await ctx.send(f"{Emojis.cross_mark} This user can already stream.") + log.debug(f"{user} ({user.id}) already has permission to stream.") return # Schedule task to remove streaming permission from Member and add it to task cache self.scheduler.schedule_at(duration, user.id, self._remove_streaming_permission(user)) await self.task_cache.set(user.id, duration.timestamp()) await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") - duration = format_infraction_with_duration(str(duration)) - await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream until {duration}.") + revoke_time = format_infraction_with_duration(str(duration)) + await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream until {revoke_time}.") + log.debug(f"Successfully given {user} ({user.id}) permission to stream until {revoke_time}.") 
@commands.command(aliases=("pstream",)) @commands.has_any_role(*STAFF_ROLES) async def permanentstream(self, ctx: commands.Context, user: discord.Member) -> None: """Permanently grants the given user the permission to stream.""" + log.trace(f"Attempting to give permenant streaming permission to {user} ({user.id}).") # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: @@ -110,17 +115,21 @@ class Stream(commands.Cog): self.scheduler.cancel(user.id) await self.task_cache.delete(user.id) await ctx.send(f"{Emojis.check_mark} Changed temporary permission to permanent.") + log.debug(f"Successfully upgraded temporary streaming permission for {user} ({user.id}) to permanent.") return await ctx.send(f"{Emojis.cross_mark} This user can already stream.") + log.debug(f"{user} ({user.id}) already had permanent streaming permission.") return await user.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") await ctx.send(f"{Emojis.check_mark} Permanently granted {user.mention} the permission to stream.") + log.debug(f"Successfully given {user} ({user.id}) permanent streaming permission.") @commands.command(aliases=("unstream", "rstream")) @commands.has_any_role(*STAFF_ROLES) async def revokestream(self, ctx: commands.Context, user: discord.Member) -> None: """Revoke the permission to stream from the given user.""" + log.trace(f"Attempting to remove streaming permission from {user} ({user.id}).") # Check if user has the streaming permission to begin with allowed = any(Roles.video == role.id for role in user.roles) if allowed: @@ -129,8 +138,10 @@ class Stream(commands.Cog): self.scheduler.cancel(user.id) await self._remove_streaming_permission(user) await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {user.mention}.") + log.debug(f"Successfully revoked streaming permission from {user} ({user.id}).") else: await ctx.send(f"{Emojis.cross_mark} 
This user already can't stream.") + log.debug(f"{user} ({user.id}) didn't have the streaming permission to remove!") def setup(bot: Bot) -> None: -- cgit v1.2.3 From 5da727234595fe7fb19fc04efa5ba1984328e06a Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Thu, 1 Apr 2021 08:37:22 +0000 Subject: Update policy documents --- CODE_OF_CONDUCT.md | 3 ++ CONTRIBUTING.md | 124 +---------------------------------------------------- SECURITY.md | 3 ++ 3 files changed, 8 insertions(+), 122 deletions(-) create mode 100644 CODE_OF_CONDUCT.md create mode 100644 SECURITY.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..57ccd80e7 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Code of Conduct + +The Python Discord Code of Conduct can be found [on our website](https://pydis.com/coc). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index addab32ff..f20b53162 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,123 +1,3 @@ -# Contributing to one of Our Projects +# Contributing Guidelines -Our projects are open-source and are automatically deployed whenever commits are pushed to the `main` branch on each repository, so we've created a set of guidelines in order to keep everything clean and in working order. - -Note that contributions may be rejected on the basis of a contributor failing to follow these guidelines. - -## Rules - -1. **No force-pushes** or modifying the Git history in any way. -2. If you have direct access to the repository, **create a branch for your changes** and create a pull request for that branch. If not, create a branch on a fork of the repository and create a pull request from there. - * It's common practice for a repository to reject direct pushes to `main`, so make branching a habit! - * If PRing from your own fork, **ensure that "Allow edits from maintainers" is checked**. This gives permission for maintainers to commit changes directly to your fork, speeding up the review process. -3. 
**Adhere to the prevailing code style**, which we enforce using [`flake8`](http://flake8.pycqa.org/en/latest/index.html) and [`pre-commit`](https://pre-commit.com/). - * Run `flake8` and `pre-commit` against your code [**before** you push it](https://soundcloud.com/lemonsaurusrex/lint-before-you-push). Your commit will be rejected by the build server if it fails to lint. - * [Git Hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) are a powerful git feature for executing custom scripts when certain important git actions occur. The pre-commit hook is the first hook executed during the commit process and can be used to check the code being committed & abort the commit if issues, such as linting failures, are detected. While git hooks can seem daunting to configure, the `pre-commit` framework abstracts this process away from you and is provided as a dev dependency for this project. Run `pipenv run precommit` when setting up the project and you'll never have to worry about committing code that fails linting. -4. **Make great commits**. A well structured git log is key to a project's maintainability; it efficiently provides insight into when and *why* things were done for future maintainers of the project. - * Commits should be as narrow in scope as possible. Commits that span hundreds of lines across multiple unrelated functions and/or files are very hard for maintainers to follow. After about a week they'll probably be hard for you to follow too. - * Avoid making minor commits for fixing typos or linting errors. Since you've already set up a `pre-commit` hook to run the linting pipeline before a commit, you shouldn't be committing linting issues anyway. - * A more in-depth guide to writing great commit messages can be found in Chris Beam's [*How to Write a Git Commit Message*](https://chris.beams.io/posts/git-commit/) -5. **Avoid frequent pushes to the main repository**. This goes for PRs opened against your fork as well. 
Our test build pipelines are triggered every time a push to the repository (or PR) is made. Try to batch your commits until you've finished working for that session, or you've reached a point where collaborators need your commits to continue their own work. This also provides you the opportunity to amend commits for minor changes rather than having to commit them on their own because you've already pushed. - * This includes merging main into your branch. Try to leave merging from main for after your PR passes review; a maintainer will bring your PR up to date before merging. Exceptions to this include: resolving merge conflicts, needing something that was pushed to main for your branch, or something was pushed to main that could potentionally affect the functionality of what you're writing. -6. **Don't fight the framework**. Every framework has its flaws, but the frameworks we've picked out have been carefully chosen for their particular merits. If you can avoid it, please resist reimplementing swathes of framework logic - the work has already been done for you! -7. If someone is working on an issue or pull request, **do not open your own pull request for the same task**. Instead, collaborate with the author(s) of the existing pull request. Duplicate PRs opened without communicating with the other author(s) and/or PyDis staff will be closed. Communication is key, and there's no point in two separate implementations of the same thing. - * One option is to fork the other contributor's repository and submit your changes to their branch with your own pull request. We suggest following these guidelines when interacting with their repository as well. - * The author(s) of inactive PRs and claimed issues will be be pinged after a week of inactivity for an update. Continued inactivity may result in the issue being released back to the community and/or PR closure. -8. **Work as a team** and collaborate wherever possible. 
Keep things friendly and help each other out - these are shared projects and nobody likes to have their feet trodden on. -9. All static content, such as images or audio, **must be licensed for open public use**. - * Static content must be hosted by a service designed to do so. Failing to do so is known as "leeching" and is frowned upon, as it generates extra bandwidth costs to the host without providing benefit. It would be best if appropriately licensed content is added to the repository itself so it can be served by PyDis' infrastructure. - -Above all, the needs of our community should come before the wants of an individual. Work together, build solutions to problems and try to do so in a way that people can learn from easily. Abuse of our trust may result in the loss of your Contributor role. - -## Changes to this Arrangement - -All projects evolve over time, and this contribution guide is no different. This document is open to pull requests or changes by contributors. If you believe you have something valuable to add or change, please don't hesitate to do so in a PR. - -## Supplemental Information -### Developer Environment -Instructions for setting the bot developer environment can be found on the [PyDis wiki](https://pythondiscord.com/pages/contributing/bot/) - -To provide a standalone development environment for this project, docker compose is utilized to pull the current version of the [site backend](https://github.com/python-discord/site). While appropriate for bot-only contributions, any contributions that necessitate backend changes will require the site repository to be appropriately configured as well. Instructions for setting up the site environment can be found on the [PyDis site](https://pythondiscord.com/pages/contributing/site/). - -When pulling down changes from GitHub, remember to sync your environment using `pipenv sync --dev` to ensure you're using the most up-to-date versions the project's dependencies. 
- -### Type Hinting -[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5. Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types. - -For example: - -```py -import typing as t - - -def foo(input_1: int, input_2: t.Dict[str, str]) -> bool: - ... -``` - -Tells us that `foo` accepts an `int` and a `dict`, with `str` keys and values, and returns a `bool`. - -All function declarations should be type hinted in code contributed to the PyDis organization. - -For more information, see *[PEP 483](https://www.python.org/dev/peps/pep-0483/) - The Theory of Type Hints* and Python's documentation for the [`typing`](https://docs.python.org/3/library/typing.html) module. - -### AutoDoc Formatting Directives -Many documentation packages provide support for automatic documentation generation from the codebase's docstrings. These tools utilize special formatting directives to enable richer formatting in the generated documentation. - -For example: - -```py -import typing as t - - -def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool: - """ - Does some things with some stuff. - - :param bar: Some input - :param baz: Optional, some dictionary with string keys and values - - :return: Some boolean - """ - ... -``` - -Since PyDis does not utilize automatic documentation generation, use of this syntax should not be used in code contributed to the organization. Should the purpose and type of the input variables not be easily discernable from the variable name and type annotation, a prose explanation can be used. Explicit references to variables, functions, classes, etc. should be wrapped with backticks (`` ` ``). 
- -For example, the above docstring would become: - -```py -import typing as t - - -def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool: - """ - Does some things with some stuff. - - This function takes an index, `bar` and checks for its presence in the database `baz`, passed as a dictionary. Returns `False` if `baz` is not passed. - """ - ... -``` - -### Logging Levels -The project currently defines [`logging`](https://docs.python.org/3/library/logging.html) levels as follows, from lowest to highest severity: -* **TRACE:** These events should be used to provide a *verbose* trace of every step of a complex process. This is essentially the `logging` equivalent of sprinkling `print` statements throughout the code. - * **Note:** This is a PyDis-implemented logging level. -* **DEBUG:** These events should add context to what's happening in a development setup to make it easier to follow what's going while working on a project. This is in the same vein as **TRACE** logging but at a much lower level of verbosity. -* **INFO:** These events are normal and don't need direct attention but are worth keeping track of in production, like checking which cogs were loaded during a start-up. -* **WARNING:** These events are out of the ordinary and should be fixed, but have not caused a failure. - * **NOTE:** Events at this logging level and higher should be reserved for events that require the attention of the DevOps team. -* **ERROR:** These events have caused a failure in a specific part of the application and require urgent attention. -* **CRITICAL:** These events have caused the whole application to fail and require immediate intervention. - -Ensure that log messages are succinct. Should you want to pass additional useful information that would otherwise make the log message overly verbose the `logging` module accepts an `extra` kwarg, which can be used to pass a dictionary. 
This is used to populate the `__dict__` of the `LogRecord` created for the logging event with user-defined attributes that can be accessed by a log handler. Additional information and caveats may be found [in Python's `logging` documentation](https://docs.python.org/3/library/logging.html#logging.Logger.debug). - -### Work in Progress (WIP) PRs -Github [provides a PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) that allows the PR author to mark it as a WIP. This provides both a visual and functional indicator that the contents of the PR are in a draft state and not yet ready for formal review. - -This feature should be utilized in place of the traditional method of prepending `[WIP]` to the PR title. - -As stated earlier, **ensure that "Allow edits from maintainers" is checked**. This gives permission for maintainers to commit changes directly to your fork, speeding up the review process. - -## Footnotes - -This document was inspired by the [Glowstone contribution guidelines](https://github.com/GlowstoneMC/Glowstone/blob/dev/docs/CONTRIBUTING.md). +The Contributing Guidelines for Python Discord projects can be found [on our website](https://pydis.com/contributing.md). diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..fa5a88a39 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,3 @@ +# Security Notice + +The Security Notice for Python Discord projects can be found [on our website](https://pydis.com/security.md). 
-- cgit v1.2.3 From 15523ae20ded473c56ca4cad18965b45833e5604 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 16:46:58 +0100 Subject: Describe redis cache var better in stream cog --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 87d3a345c..f2e34e910 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -18,7 +18,7 @@ class Stream(commands.Cog): """Grant and revoke streaming permissions from users.""" # Stores tasks to remove streaming permission - # User id : timestamp relation + # RedisCache[discord.Member.id, UtcPosixTimestamp] task_cache = RedisCache() def __init__(self, bot: Bot): -- cgit v1.2.3 From 4299b473f1b086db3443daf2ce1460d7f4429c9b Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 16:47:35 +0100 Subject: Rename function in stream cog for consistancy --- bot/exts/moderation/stream.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index f2e34e910..587dc6365 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -31,7 +31,7 @@ class Stream(commands.Cog): self.reload_task.cancel() self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all()) - async def _remove_streaming_permission(self, member: discord.Member) -> None: + async def _revoke_streaming_permission(self, member: discord.Member) -> None: """Remove the streaming permission from the given Member.""" await self.task_cache.delete(member.id) await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") @@ -61,7 +61,7 @@ class Stream(commands.Cog): self.scheduler.schedule_at( revoke_time, key, - self._remove_streaming_permission(member) + self._revoke_streaming_permission(member) ) @commands.command(aliases=("streaming",)) @@ -96,7 +96,7 @@ class Stream(commands.Cog): return # Schedule 
task to remove streaming permission from Member and add it to task cache - self.scheduler.schedule_at(duration, user.id, self._remove_streaming_permission(user)) + self.scheduler.schedule_at(duration, user.id, self._revoke_streaming_permission(user)) await self.task_cache.set(user.id, duration.timestamp()) await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") revoke_time = format_infraction_with_duration(str(duration)) @@ -136,7 +136,7 @@ class Stream(commands.Cog): # Cancel scheduled task to take away streaming permission to avoid errors if user.id in self.scheduler: self.scheduler.cancel(user.id) - await self._remove_streaming_permission(user) + await self._revoke_streaming_permission(user) await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {user.mention}.") log.debug(f"Successfully revoked streaming permission from {user} ({user.id}).") else: -- cgit v1.2.3 From c38886d1bb8d0394166699d6356a5361646dd15f Mon Sep 17 00:00:00 2001 From: ChrisJL Date: Thu, 1 Apr 2021 16:48:15 +0100 Subject: Fix spelling errors in logging statements - Stream cog Co-authored-by: Shivansh-007 --- bot/exts/moderation/stream.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 87d3a345c..f260b9421 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -48,7 +48,7 @@ class Stream(commands.Cog): member = await self.bot.get_guild(Guild.id).fetch_member(key) except discord.errors.NotFound: log.debug( - f"Member {key} left the guild before we could scheudle " + f"Member {key} left the guild before we could schedule " "the revoking of their streaming permissions." 
) await self.task_cache.delete(key) @@ -107,7 +107,7 @@ class Stream(commands.Cog): @commands.has_any_role(*STAFF_ROLES) async def permanentstream(self, ctx: commands.Context, user: discord.Member) -> None: """Permanently grants the given user the permission to stream.""" - log.trace(f"Attempting to give permenant streaming permission to {user} ({user.id}).") + log.trace(f"Attempting to give permanent streaming permission to {user} ({user.id}).") # Check if user already has streaming permission already_allowed = any(Roles.video == role.id for role in user.roles) if already_allowed: -- cgit v1.2.3 From 866337ba4f2355e731a0c52df8d1e62275bafff0 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 16:50:38 +0100 Subject: Change references to user's in Stream cog to members --- bot/exts/moderation/stream.py | 82 ++++++++++++++++++++++--------------------- 1 file changed, 42 insertions(+), 40 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 6df9b46d2..61dadc358 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -15,7 +15,7 @@ log = logging.getLogger(__name__) class Stream(commands.Cog): - """Grant and revoke streaming permissions from users.""" + """Grant and revoke streaming permissions from members.""" # Stores tasks to remove streaming permission # RedisCache[discord.Member.id, UtcPosixTimestamp] @@ -66,9 +66,9 @@ class Stream(commands.Cog): @commands.command(aliases=("streaming",)) @commands.has_any_role(*STAFF_ROLES) - async def stream(self, ctx: commands.Context, user: discord.Member, duration: Expiry = None) -> None: + async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None: """ - Temporarily grant streaming permissions to a user for a given duration. + Temporarily grant streaming permissions to a member for a given duration. A unit of time should be appended to the duration. 
Units (∗case-sensitive): @@ -82,66 +82,68 @@ class Stream(commands.Cog): Alternatively, an ISO 8601 timestamp can be provided for the duration. """ - log.trace(f"Attempting to give temporary streaming permission to {user} ({user.id}).") - # if duration is none then calculate default duration + log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).") + # If duration is none then calculate default duration if duration is None: now = datetime.datetime.utcnow() duration = now + datetime.timedelta(minutes=VideoPermission.default_permission_duration) - # Check if user already has streaming permission - already_allowed = any(Roles.video == role.id for role in user.roles) + # Check if the member already has streaming permission + already_allowed = any(Roles.video == role.id for role in member.roles) if already_allowed: - await ctx.send(f"{Emojis.cross_mark} This user can already stream.") - log.debug(f"{user} ({user.id}) already has permission to stream.") + await ctx.send(f"{Emojis.cross_mark} This member can already stream.") + log.debug(f"{member} ({member.id}) already has permission to stream.") return # Schedule task to remove streaming permission from Member and add it to task cache - self.scheduler.schedule_at(duration, user.id, self._revoke_streaming_permission(user)) - await self.task_cache.set(user.id, duration.timestamp()) - await user.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") + self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member)) + await self.task_cache.set(member.id, duration.timestamp()) + await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") revoke_time = format_infraction_with_duration(str(duration)) - await ctx.send(f"{Emojis.check_mark} {user.mention} can now stream until {revoke_time}.") - log.debug(f"Successfully given {user} ({user.id}) permission to stream until {revoke_time}.") + await 
ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {revoke_time}.") + log.debug(f"Successfully given {member} ({member.id}) permission to stream until {revoke_time}.") @commands.command(aliases=("pstream",)) @commands.has_any_role(*STAFF_ROLES) - async def permanentstream(self, ctx: commands.Context, user: discord.Member) -> None: - """Permanently grants the given user the permission to stream.""" - log.trace(f"Attempting to give permanent streaming permission to {user} ({user.id}).") - # Check if user already has streaming permission - already_allowed = any(Roles.video == role.id for role in user.roles) + async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None: + """Permanently grants the given member the permission to stream.""" + log.trace(f"Attempting to give permanent streaming permission to {member} ({member.id}).") + # Check if the member already has streaming permission + already_allowed = any(Roles.video == role.id for role in member.roles) if already_allowed: - if user.id in self.scheduler: - self.scheduler.cancel(user.id) - await self.task_cache.delete(user.id) + if member.id in self.scheduler: + self.scheduler.cancel(member.id) + await self.task_cache.delete(member.id) await ctx.send(f"{Emojis.check_mark} Changed temporary permission to permanent.") - log.debug(f"Successfully upgraded temporary streaming permission for {user} ({user.id}) to permanent.") + log.debug( + f"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent." 
+ ) return - await ctx.send(f"{Emojis.cross_mark} This user can already stream.") - log.debug(f"{user} ({user.id}) already had permanent streaming permission.") + await ctx.send(f"{Emojis.cross_mark} This member can already stream.") + log.debug(f"{member} ({member.id}) already had permanent streaming permission.") return - await user.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") - await ctx.send(f"{Emojis.check_mark} Permanently granted {user.mention} the permission to stream.") - log.debug(f"Successfully given {user} ({user.id}) permanent streaming permission.") + await member.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") + await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.") + log.debug(f"Successfully given {member} ({member.id}) permanent streaming permission.") @commands.command(aliases=("unstream", "rstream")) @commands.has_any_role(*STAFF_ROLES) - async def revokestream(self, ctx: commands.Context, user: discord.Member) -> None: - """Revoke the permission to stream from the given user.""" - log.trace(f"Attempting to remove streaming permission from {user} ({user.id}).") - # Check if user has the streaming permission to begin with - allowed = any(Roles.video == role.id for role in user.roles) + async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None: + """Revoke the permission to stream from the given member.""" + log.trace(f"Attempting to remove streaming permission from {member} ({member.id}).") + # Check if the memeber has the streaming permission to begin with + allowed = any(Roles.video == role.id for role in member.roles) if allowed: # Cancel scheduled task to take away streaming permission to avoid errors - if user.id in self.scheduler: - self.scheduler.cancel(user.id) - await self._revoke_streaming_permission(user) - await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from 
{user.mention}.") - log.debug(f"Successfully revoked streaming permission from {user} ({user.id}).") + if member.id in self.scheduler: + self.scheduler.cancel(member.id) + await self._revoke_streaming_permission(member) + await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.") + log.debug(f"Successfully revoked streaming permission from {member} ({member.id}).") else: - await ctx.send(f"{Emojis.cross_mark} This user already can't stream.") - log.debug(f"{user} ({user.id}) didn't have the streaming permission to remove!") + await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!") + log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!") def setup(bot: Bot) -> None: -- cgit v1.2.3 From 9f8ef6cae6eb2137fb0e2a8efdb29c22759f9783 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 16:51:44 +0100 Subject: Remove unnecessary wrapper variable --- bot/exts/moderation/stream.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 61dadc358..008ee8afe 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -85,8 +85,9 @@ class Stream(commands.Cog): log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).") # If duration is none then calculate default duration if duration is None: - now = datetime.datetime.utcnow() - duration = now + datetime.timedelta(minutes=VideoPermission.default_permission_duration) + duration = datetime.datetime.utcnow() + datetime.timedelta( + minutes=VideoPermission.default_permission_duration + ) # Check if the member already has streaming permission already_allowed = any(Roles.video == role.id for role in member.roles) -- cgit v1.2.3 From de76dbcb40674573913a6a9b077f5dbffaaff9f0 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 17:05:04 +0100 Subject: Convert to arrow for datetime management 
--- bot/exts/moderation/stream.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 008ee8afe..4c86219f5 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,7 +1,9 @@ -import datetime import logging +from datetime import timedelta +import arrow import discord +from arrow import Arrow from async_rediscache import RedisCache from discord.ext import commands @@ -56,7 +58,7 @@ class Stream(commands.Cog): except discord.HTTPException as e: log.exception(f"Exception while trying to retrieve member {key} from discord\n{e}") continue - revoke_time = datetime.datetime.utcfromtimestamp(value) + revoke_time = Arrow.utcfromtimestamp(value) log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}") self.scheduler.schedule_at( revoke_time, @@ -85,7 +87,7 @@ class Stream(commands.Cog): log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).") # If duration is none then calculate default duration if duration is None: - duration = datetime.datetime.utcnow() + datetime.timedelta( + duration = arrow.utcnow().naive + timedelta( minutes=VideoPermission.default_permission_duration ) @@ -98,7 +100,7 @@ class Stream(commands.Cog): # Schedule task to remove streaming permission from Member and add it to task cache self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member)) - await self.task_cache.set(member.id, duration.timestamp()) + await self.task_cache.set(member.id, Arrow.fromdatetime(duration).timestamp()) await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") revoke_time = format_infraction_with_duration(str(duration)) await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {revoke_time}.") -- cgit v1.2.3 From 3cc04418cee734e3a58dc8d8d453e227085117aa Mon Sep 17 00:00:00 2001 From: Chris Date: 
Thu, 1 Apr 2021 18:50:49 +0100 Subject: Only convert to naive when sending to scheduler, general refactor&comments --- bot/exts/moderation/stream.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 4c86219f5..304b92293 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -87,7 +87,7 @@ class Stream(commands.Cog): log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).") # If duration is none then calculate default duration if duration is None: - duration = arrow.utcnow().naive + timedelta( + duration = arrow.utcnow() + timedelta( minutes=VideoPermission.default_permission_duration ) @@ -99,9 +99,12 @@ class Stream(commands.Cog): return # Schedule task to remove streaming permission from Member and add it to task cache - self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member)) + self.scheduler.schedule_at(duration.naive, member.id, self._revoke_streaming_permission(member)) await self.task_cache.set(member.id, Arrow.fromdatetime(duration).timestamp()) + await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") + + # Convert here for nicer logging and output revoke_time = format_infraction_with_duration(str(duration)) await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {revoke_time}.") log.debug(f"Successfully given {member} ({member.id}) permission to stream until {revoke_time}.") @@ -111,17 +114,20 @@ class Stream(commands.Cog): async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None: """Permanently grants the given member the permission to stream.""" log.trace(f"Attempting to give permanent streaming permission to {member} ({member.id}).") + # Check if the member already has streaming permission - already_allowed = any(Roles.video == role.id for role in 
member.roles) - if already_allowed: + if any(Roles.video == role.id for role in member.roles): if member.id in self.scheduler: + # Member has temp permission, so cancel the task to revoke later and delete from cache self.scheduler.cancel(member.id) await self.task_cache.delete(member.id) + await ctx.send(f"{Emojis.check_mark} Changed temporary permission to permanent.") log.debug( f"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent." ) return + await ctx.send(f"{Emojis.cross_mark} This member can already stream.") log.debug(f"{member} ({member.id}) already had permanent streaming permission.") return @@ -135,18 +141,21 @@ class Stream(commands.Cog): async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None: """Revoke the permission to stream from the given member.""" log.trace(f"Attempting to remove streaming permission from {member} ({member.id}).") - # Check if the memeber has the streaming permission to begin with - allowed = any(Roles.video == role.id for role in member.roles) - if allowed: - # Cancel scheduled task to take away streaming permission to avoid errors + + # Check if the member already has streaming permission + if any(Roles.video == role.id for role in member.roles): if member.id in self.scheduler: + # Member has temp permission, so cancel the task to revoke later and delete from cache self.scheduler.cancel(member.id) + await self.task_cache.delete(member.id) await self._revoke_streaming_permission(member) + await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.") log.debug(f"Successfully revoked streaming permission from {member} ({member.id}).") - else: - await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!") - log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!") + return + + await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!") + 
log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!") def setup(bot: Bot) -> None: -- cgit v1.2.3 From 509462b25fcc07430d33e0ddebd7be4ccd2600b5 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 18:52:00 +0100 Subject: Remove redundant traceback log in log.exception --- bot/exts/moderation/stream.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 304b92293..4a624ba2c 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -55,8 +55,8 @@ class Stream(commands.Cog): ) await self.task_cache.delete(key) continue - except discord.HTTPException as e: - log.exception(f"Exception while trying to retrieve member {key} from discord\n{e}") + except discord.HTTPException: + log.exception(f"Exception while trying to retrieve member {key} from Discord.") continue revoke_time = Arrow.utcfromtimestamp(value) log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}") -- cgit v1.2.3 From e2f80e6914adefee712992fa56540872aef45468 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 1 Apr 2021 20:04:12 +0200 Subject: Add missing 'attempts' Co-authored-by: Kieran Siek --- bot/converters.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/converters.py b/bot/converters.py index 6ea2d887b..3bf05cfb3 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -192,7 +192,9 @@ class Inventory(Converter): """Convert url to Intersphinx inventory URL.""" await ctx.trigger_typing() if (inventory := await _inventory_parser.fetch_inventory(url)) is None: - raise BadArgument(f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS}.") + raise BadArgument( + f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS} attempts." 
+ ) return url, inventory -- cgit v1.2.3 From ac660e67eab0407b65b6d8cbe0a69181901f06bb Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 19:16:46 +0100 Subject: Update schedule_at() to work with tz aware date times --- bot/utils/scheduling.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 4dd036e4f..b3f62257a 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -59,14 +59,18 @@ class Scheduler: def schedule_at(self, time: datetime, task_id: t.Hashable, coroutine: t.Coroutine) -> None: """ - Schedule `coroutine` to be executed at the given naïve UTC `time`. + Schedule `coroutine` to be executed at the given `time`. + + If `time` is timezone aware, then use that timezone to calculate now() when subtracting. + If `time` is naïve, then we use UTC. If `time` is in the past, schedule `coroutine` immediately. If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere. 
""" - delay = (time - datetime.utcnow()).total_seconds() + now_datetime = datetime.now(time.tzinfo) if time.tzinfo else datetime.utcnow() + delay = (time - now_datetime).total_seconds() if delay > 0: coroutine = self._await_later(delay, task_id, coroutine) -- cgit v1.2.3 From e07ebebfcbbbe4e530b9bf106e2aa5d2e7ad9838 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 19:18:51 +0100 Subject: Use tz aware timestamps and refactor for readibility - Stream cog --- bot/exts/moderation/stream.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 4a624ba2c..50ed01e78 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -87,9 +87,7 @@ class Stream(commands.Cog): log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).") # If duration is none then calculate default duration if duration is None: - duration = arrow.utcnow() + timedelta( - minutes=VideoPermission.default_permission_duration - ) + duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration) # Check if the member already has streaming permission already_allowed = any(Roles.video == role.id for role in member.roles) @@ -99,7 +97,7 @@ class Stream(commands.Cog): return # Schedule task to remove streaming permission from Member and add it to task cache - self.scheduler.schedule_at(duration.naive, member.id, self._revoke_streaming_permission(member)) + self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member)) await self.task_cache.set(member.id, Arrow.fromdatetime(duration).timestamp()) await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") @@ -134,7 +132,7 @@ class Stream(commands.Cog): await member.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} 
the permission to stream.") - log.debug(f"Successfully given {member} ({member.id}) permanent streaming permission.") + log.debug(f"Successfully gave {member} ({member.id}) permanent streaming permission.") @commands.command(aliases=("unstream", "rstream")) @commands.has_any_role(*STAFF_ROLES) -- cgit v1.2.3 From 4962b10b20f90a849af34c4efea3512909616955 Mon Sep 17 00:00:00 2001 From: ChrisJL Date: Thu, 1 Apr 2021 20:44:03 +0100 Subject: Reword logging and docstrings to different mood Co-authored-by: Mark --- bot/exts/moderation/stream.py | 2 +- bot/utils/scheduling.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 50ed01e78..e5b2f2cc7 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -105,7 +105,7 @@ class Stream(commands.Cog): # Convert here for nicer logging and output revoke_time = format_infraction_with_duration(str(duration)) await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {revoke_time}.") - log.debug(f"Successfully given {member} ({member.id}) permission to stream until {revoke_time}.") + log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.") @commands.command(aliases=("pstream",)) @commands.has_any_role(*STAFF_ROLES) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index b3f62257a..6843bae88 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -62,7 +62,7 @@ class Scheduler: Schedule `coroutine` to be executed at the given `time`. If `time` is timezone aware, then use that timezone to calculate now() when subtracting. - If `time` is naïve, then we use UTC. + If `time` is naïve, then use UTC. If `time` is in the past, schedule `coroutine` immediately. 
-- cgit v1.2.3 From adf2801e1d0f1e2d4608d65ae37c072629ad32dc Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 21:20:04 +0100 Subject: Ensure duration is always tz-aware --- bot/exts/moderation/stream.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 50ed01e78..a54e95fd9 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,5 +1,5 @@ import logging -from datetime import timedelta +from datetime import timedelta, timezone import arrow import discord @@ -85,9 +85,14 @@ class Stream(commands.Cog): Alternatively, an ISO 8601 timestamp can be provided for the duration. """ log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).") - # If duration is none then calculate default duration + if duration is None: + # If duration is None then calculate default duration duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration) + elif duration.tzinfo is None: + # Make duration tz-aware. + # ISODateTime could already include tzinfo, this check is so it isn't overwritten. 
+ duration.replace(tzinfo=timezone.utc) # Check if the member already has streaming permission already_allowed = any(Roles.video == role.id for role in member.roles) -- cgit v1.2.3 From b21618b544e60e775e7365075e35acbd772795b7 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 1 Apr 2021 21:27:17 +0100 Subject: Mention user when upgrading streaming permissions to permanent --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index a54e95fd9..eab47465f 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -125,7 +125,7 @@ class Stream(commands.Cog): self.scheduler.cancel(member.id) await self.task_cache.delete(member.id) - await ctx.send(f"{Emojis.check_mark} Changed temporary permission to permanent.") + await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.") log.debug( f"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent." 
) -- cgit v1.2.3 From 8f97c42f1248a0b678763d38cf2608fff6c87de2 Mon Sep 17 00:00:00 2001 From: Chris Date: Sat, 3 Apr 2021 21:46:07 +0100 Subject: Remove redundant conversion to Arrow datetime in Stream cog --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 2d1f12469..dab565e57 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -103,7 +103,7 @@ class Stream(commands.Cog): # Schedule task to remove streaming permission from Member and add it to task cache self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member)) - await self.task_cache.set(member.id, Arrow.fromdatetime(duration).timestamp()) + await self.task_cache.set(member.id, duration.timestamp()) await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") -- cgit v1.2.3 From 7175b9d587eed22d65ca74fc3f455303daabbbd1 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Sun, 4 Apr 2021 08:24:45 +0300 Subject: Blacklist staff_info for duckpond --- config-default.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config-default.yml b/config-default.yml index 6dec75fdc..c5e830ec4 100644 --- a/config-default.yml +++ b/config-default.yml @@ -200,6 +200,7 @@ guild: nomination_voting: 822853512709931008 organisation: &ORGANISATION 551789653284356126 staff_lounge: &STAFF_LOUNGE 464905259261755392 + staff_info: &STAFF_INFO 396684402404622347 # Staff announcement channels admin_announcements: &ADMIN_ANNOUNCEMENTS 749736155569848370 @@ -520,6 +521,7 @@ duck_pond: - *STAFF_ANNOUNCEMENTS - *MOD_ANNOUNCEMENTS - *ADMIN_ANNOUNCEMENTS + - *STAFF_INFO python_news: -- cgit v1.2.3 From 6a7325c5f7fe32787fe5cb94ef4a3c35a0474d60 Mon Sep 17 00:00:00 2001 From: Ben Soyka Date: Sun, 4 Apr 2021 12:47:07 -0600 Subject: Ignore colourless roles in !user embed --- bot/exts/info/information.py | 4 +++- 1 file 
changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 0555544ce..a8844fe29 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -284,7 +284,9 @@ class Information(Cog): embed.add_field(name=field_name, value=field_content, inline=False) embed.set_thumbnail(url=user.avatar_url_as(static_format="png")) - embed.colour = user.top_role.colour if roles else Colour.blurple() + + role_colours = [role.colour for role in user.roles[1:] if role.colour != Colour.default()] + embed.colour = role_colours[-1] if role_colours else Colour.blurple() return embed -- cgit v1.2.3 From 508e400cca1d73a8693cd348886c1c7a8aac7725 Mon Sep 17 00:00:00 2001 From: Ben Soyka Date: Sun, 4 Apr 2021 21:03:22 -0600 Subject: Use Member.colour rather than a list comprehension for !user --- bot/exts/info/information.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index a8844fe29..5e2c4b417 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -284,9 +284,7 @@ class Information(Cog): embed.add_field(name=field_name, value=field_content, inline=False) embed.set_thumbnail(url=user.avatar_url_as(static_format="png")) - - role_colours = [role.colour for role in user.roles[1:] if role.colour != Colour.default()] - embed.colour = role_colours[-1] if role_colours else Colour.blurple() + embed.colour = user.colour if user.colour != Colour.default() else Colour.blurple() return embed -- cgit v1.2.3 From 61ab2cd4d434def0743e767acdb3f816c20e4dce Mon Sep 17 00:00:00 2001 From: Ben Soyka Date: Sun, 4 Apr 2021 22:35:53 -0600 Subject: Update information tests for new embed color logic --- tests/bot/exts/info/test_information.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index 80731c9f0..b1b7d37ae 
100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -283,6 +283,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): user = helpers.MockMember() user.nick = None user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") + user.colour = 0 embed = await self.cog.create_user_embed(ctx, user) @@ -298,6 +299,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): user = helpers.MockMember() user.nick = "Cat lover" user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") + user.colour = 0 embed = await self.cog.create_user_embed(ctx, user) @@ -314,7 +316,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): admins_role.colour = 100 # A `MockMember` has the @Everyone role by default; we add the Admins to that. - user = helpers.MockMember(roles=[admins_role], top_role=admins_role) + user = helpers.MockMember(roles=[admins_role], top_role=admins_role, colour=100) embed = await self.cog.create_user_embed(ctx, user) @@ -337,7 +339,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): infraction_counts.return_value = ("Infractions", "expanded infractions info") nomination_counts.return_value = ("Nominations", "nomination info") - user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role) + user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role, colour=100) embed = await self.cog.create_user_embed(ctx, user) infraction_counts.assert_called_once_with(user) @@ -371,7 +373,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): infraction_counts.return_value = ("Infractions", "basic infractions info") - user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role) + user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role, colour=100) embed = await self.cog.create_user_embed(ctx, user) infraction_counts.assert_called_once_with(user) @@ -409,7 +411,7 @@ class 
UserEmbedTests(unittest.IsolatedAsyncioTestCase): moderators_role = helpers.MockRole(name='Moderators') moderators_role.colour = 100 - user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role) + user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role, colour=100) embed = await self.cog.create_user_embed(ctx, user) self.assertEqual(embed.colour, discord.Colour(moderators_role.colour)) @@ -422,7 +424,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """The embed should be created with a blurple colour if the user has no assigned roles.""" ctx = helpers.MockContext() - user = helpers.MockMember(id=217) + user = helpers.MockMember(id=217, colour=discord.Colour.blurple()) embed = await self.cog.create_user_embed(ctx, user) self.assertEqual(embed.colour, discord.Colour.blurple()) @@ -435,7 +437,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """The embed thumbnail should be set to the user's avatar in `png` format.""" ctx = helpers.MockContext() - user = helpers.MockMember(id=217) + user = helpers.MockMember(id=217, colour=0) user.avatar_url_as.return_value = "avatar url" embed = await self.cog.create_user_embed(ctx, user) -- cgit v1.2.3 From 56c675ad932dde14c970d48d864e3b80013f1399 Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Mon, 5 Apr 2021 14:24:30 +0100 Subject: chore: don't create an instance of intents before calling its classmethod --- bot/bot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/bot.py b/bot/bot.py index 3a2af472d..914da9c98 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -111,7 +111,7 @@ class Bot(commands.Bot): loop = asyncio.get_event_loop() allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES] - intents = discord.Intents().all() + intents = discord.Intents.all() intents.presences = False intents.dm_typing = False intents.dm_reactions = False -- cgit v1.2.3 From c68dba3b9d570292f5f47e066eb9866fae1001f2 Mon Sep 17 00:00:00 2001 
From: ks129 <45097959+ks129@users.noreply.github.com> Date: Mon, 5 Apr 2021 17:52:54 +0300 Subject: Migrate GHCR_TOKEN -> GITHUB_TOKEN in Docker build action GITHUB_TOKEN has now enough permissions to be used for pushing to GHCR, so we don't need PAT anymore. --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e6826e09b..84a671917 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -39,7 +39,7 @@ jobs: with: registry: ghcr.io username: ${{ github.repository_owner }} - password: ${{ secrets.GHCR_TOKEN }} + password: ${{ secrets.GITHUB_TOKEN }} # Build and push the container to the GitHub Container # Repository. The container will be tagged as "latest" -- cgit v1.2.3 From 835a0a6f4a45018d21dacbfbf69afe07361155aa Mon Sep 17 00:00:00 2001 From: Ben Soyka Date: Mon, 5 Apr 2021 08:56:22 -0600 Subject: Minor test changes for the !user embed --- tests/bot/exts/info/test_information.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index b1b7d37ae..a996ce477 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -313,10 +313,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """Created `!user` embeds should not contain mention of the @everyone-role.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) admins_role = helpers.MockRole(name='Admins') - admins_role.colour = 100 # A `MockMember` has the @Everyone role by default; we add the Admins to that. 
- user = helpers.MockMember(roles=[admins_role], top_role=admins_role, colour=100) + user = helpers.MockMember(roles=[admins_role], colour=100) embed = await self.cog.create_user_embed(ctx, user) @@ -334,12 +333,11 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=50)) moderators_role = helpers.MockRole(name='Moderators') - moderators_role.colour = 100 infraction_counts.return_value = ("Infractions", "expanded infractions info") nomination_counts.return_value = ("Nominations", "nomination info") - user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role, colour=100) + user = helpers.MockMember(id=314, roles=[moderators_role], colour=100) embed = await self.cog.create_user_embed(ctx, user) infraction_counts.assert_called_once_with(user) @@ -369,11 +367,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=100)) moderators_role = helpers.MockRole(name='Moderators') - moderators_role.colour = 100 infraction_counts.return_value = ("Infractions", "basic infractions info") - user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role, colour=100) + user = helpers.MockMember(id=314, roles=[moderators_role], colour=100) embed = await self.cog.create_user_embed(ctx, user) infraction_counts.assert_called_once_with(user) @@ -409,12 +406,11 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): ctx = helpers.MockContext() moderators_role = helpers.MockRole(name='Moderators') - moderators_role.colour = 100 - user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role, colour=100) + user = helpers.MockMember(id=314, roles=[moderators_role], colour=100) embed = await self.cog.create_user_embed(ctx, user) - self.assertEqual(embed.colour, discord.Colour(moderators_role.colour)) + self.assertEqual(embed.colour, discord.Colour(100)) @unittest.mock.patch( 
f"{COG_PATH}.basic_user_infraction_counts", @@ -424,7 +420,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """The embed should be created with a blurple colour if the user has no assigned roles.""" ctx = helpers.MockContext() - user = helpers.MockMember(id=217, colour=discord.Colour.blurple()) + user = helpers.MockMember(id=217, colour=discord.Colour.default()) embed = await self.cog.create_user_embed(ctx, user) self.assertEqual(embed.colour, discord.Colour.blurple()) -- cgit v1.2.3 From 44b74d3fb43ce50bda5e99ddd57146aecf4a5f0b Mon Sep 17 00:00:00 2001 From: Chris Date: Mon, 5 Apr 2021 17:21:35 +0100 Subject: Output stream command confimation in embed for automatic tz conversion --- bot/exts/moderation/stream.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index dab565e57..04c15e542 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -8,7 +8,7 @@ from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Emojis, Guild, Roles, STAFF_ROLES, VideoPermission +from bot.constants import Colours, Emojis, Guild, Roles, STAFF_ROLES, VideoPermission from bot.converters import Expiry from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration @@ -97,7 +97,7 @@ class Stream(commands.Cog): # Check if the member already has streaming permission already_allowed = any(Roles.video == role.id for role in member.roles) if already_allowed: - await ctx.send(f"{Emojis.cross_mark} This member can already stream.") + await ctx.send(f"{Emojis.cross_mark} {member.mention} can already stream.") log.debug(f"{member} ({member.id}) already has permission to stream.") return @@ -107,9 +107,19 @@ class Stream(commands.Cog): await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") - # Convert here for nicer 
logging and output + # Use embed as embed timestamps do timezone conversions. + embed = discord.Embed( + description=f"{Emojis.check_mark} {member.mention} can now stream.", + colour=Colours.soft_green + ) + embed.set_footer(text=f"Streaming permission has been given to {member} until") + embed.timestamp = duration + + # Mention in content as mentions in embeds don't ping + await ctx.send(content=member.mention, embed=embed) + + # Convert here for nicer logging revoke_time = format_infraction_with_duration(str(duration)) - await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {revoke_time}.") log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.") @commands.command(aliases=("pstream",)) -- cgit v1.2.3 From 60a73ae2cc033babbef2589bfd729b75f8d8ad12 Mon Sep 17 00:00:00 2001 From: Chris Date: Mon, 5 Apr 2021 18:03:55 +0100 Subject: Convert back to datetime as Embed.timestamp doesn't support Arrow --- bot/exts/moderation/stream.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 04c15e542..1bdcdc7d8 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -87,8 +87,9 @@ class Stream(commands.Cog): log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).") if duration is None: - # If duration is None then calculate default duration + # Use default duration and convert back to datetime as Embed.timestamp doesn't support Arrow duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration) + duration = duration.datetime elif duration.tzinfo is None: # Make duration tz-aware. # ISODateTime could already include tzinfo, this check is so it isn't overwritten. 
-- cgit v1.2.3 From 2aa7e740d6a66e477895460ab51ac02c9b625ee1 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 6 Apr 2021 11:25:13 +0200 Subject: Add a !tp get_review command to get the nomination text without posting it This can be used when an information should be added to the post, or someone wants to review the user. --- bot/exts/recruitment/talentpool/_cog.py | 13 +++++++++++ bot/exts/recruitment/talentpool/_review.py | 37 +++++++++++++++++++----------- 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index fbe79382d..eeba1b187 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -1,8 +1,10 @@ import logging import textwrap from collections import ChainMap +from io import StringIO from typing import Union +import discord from discord import Color, Embed, Member, User from discord.ext.commands import Cog, Context, group, has_any_role @@ -332,6 +334,17 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): return await ctx.send(f"✅ The user with ID `{user_id}` was marked as reviewed.") + @nomination_group.command(aliases=('gr',)) + @has_any_role(*MODERATION_ROLES) + async def get_review(self, ctx: Context, user_id: int) -> None: + """Get the user's review as a markdown file.""" + review = StringIO((await self.reviewer.make_review(user_id))[0]) + if review: + file = discord.File(review, f"{user_id}_review.md") + await ctx.send(file=file) + else: + await ctx.send(f"There doesn't appear to be an active nomination for {user_id}") + @nomination_group.command(aliases=('review',)) @has_any_role(*MODERATION_ROLES) async def post_review(self, ctx: Context, user_id: int) -> None: diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index fb3461238..c46df4bcc 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ 
-66,26 +66,40 @@ class Reviewer: self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) async def post_review(self, user_id: int, update_database: bool) -> None: - """Format a generic review of a user and post it to the nomination voting channel.""" + """Format the review of a user and post it to the nomination voting channel.""" + review, seen_emoji = await self.make_review(user_id) + if not review: + return + + guild = self.bot.get_guild(Guild.id) + channel = guild.get_channel(Channels.nomination_voting) + log.trace(f"Posting the review of {user_id}") + message = (await self._bulk_send(channel, review))[-1] + if seen_emoji: + for reaction in (seen_emoji, "👍", "👎"): + await message.add_reaction(reaction) + + if update_database: + nomination = self._pool.watched_users[user_id] + await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) + + async def make_review(self, user_id: int) -> typing.Tuple[str, Optional[Emoji]]: + """Format a generic review of a user and return it with the seen emoji.""" + log.trace(f"Formatting the review of {user_id}") nomination = self._pool.watched_users[user_id] if not nomination: log.trace(f"There doesn't appear to be an active nomination for {user_id}") - return + return "", None guild = self.bot.get_guild(Guild.id) - channel = guild.get_channel(Channels.nomination_voting) member = guild.get_member(user_id) - if update_database: - await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) - if not member: - await channel.send( + return ( f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server 😔" - ) - return + ), None opening = f"<@&{Roles.moderators}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!" 
@@ -104,10 +118,7 @@ class Reviewer: ) review = "\n\n".join(part for part in (opening, current_nominations, review_body, vote_request)) - - message = (await self._bulk_send(channel, review))[-1] - for reaction in (seen_emoji, "👍", "👎"): - await message.add_reaction(reaction) + return review, seen_emoji async def _construct_review_body(self, member: Member) -> str: """Formats the body of the nomination, with details of activity, infractions, and previous nominations.""" -- cgit v1.2.3 From 75af5eff5329cf015309f2664bd1af9eb4f2c95d Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 6 Apr 2021 14:48:59 +0200 Subject: Use emoji names in nomination body This caused unicode errors in Discord attachment previews. --- bot/exts/recruitment/talentpool/_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index c46df4bcc..e38b6bf19 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -114,7 +114,7 @@ class Reviewer: vote_request = ( "*Refer to their nomination and infraction histories for further details*.\n" f"*Please react {seen_emoji} if you've seen this post." - " Then react 👍 for approval, or 👎 for disapproval*." + " Then react :+1: for approval, or :-1: for disapproval*." 
) review = "\n\n".join(part for part in (opening, current_nominations, review_body, vote_request)) -- cgit v1.2.3 From ae9afac95481a9bb5f3c96e2920ec372a8939cc8 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 6 Apr 2021 14:51:21 +0200 Subject: Properly handle the lack of nomination of a user --- bot/exts/recruitment/talentpool/_cog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index eeba1b187..a49543806 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -338,9 +338,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @has_any_role(*MODERATION_ROLES) async def get_review(self, ctx: Context, user_id: int) -> None: """Get the user's review as a markdown file.""" - review = StringIO((await self.reviewer.make_review(user_id))[0]) + review = (await self.reviewer.make_review(user_id))[0] if review: - file = discord.File(review, f"{user_id}_review.md") + file = discord.File(StringIO(review), f"{user_id}_review.md") await ctx.send(file=file) else: await ctx.send(f"There doesn't appear to be an active nomination for {user_id}") -- cgit v1.2.3 From cb87d95cccd2ed491de0de05745e5952b521b484 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 6 Apr 2021 14:57:54 +0200 Subject: Talentpool: loop style change Co-authored-by: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> --- bot/exts/recruitment/talentpool/_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index e38b6bf19..55b162cf0 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -117,7 +117,7 @@ class Reviewer: " Then react :+1: for approval, or :-1: for disapproval*." 
) - review = "\n\n".join(part for part in (opening, current_nominations, review_body, vote_request)) + review = "\n\n".join((opening, current_nominations, review_body, vote_request)) return review, seen_emoji async def _construct_review_body(self, member: Member) -> str: -- cgit v1.2.3 From 8e04705d94299b1c3ed074b825ffb25a3c537d1d Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 6 Apr 2021 16:06:26 +0100 Subject: Add a comment to _reload_tasks_from_redis() in the Stream cog. --- bot/exts/moderation/stream.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 1bdcdc7d8..12e195172 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -46,6 +46,7 @@ class Stream(commands.Cog): member = self.bot.get_guild(Guild.id).get_member(key) if not member: + # Member isn't found in the cache try: member = await self.bot.get_guild(Guild.id).fetch_member(key) except discord.errors.NotFound: @@ -58,6 +59,7 @@ class Stream(commands.Cog): except discord.HTTPException: log.exception(f"Exception while trying to retrieve member {key} from Discord.") continue + revoke_time = Arrow.utcfromtimestamp(value) log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}") self.scheduler.schedule_at( -- cgit v1.2.3 From 384ac5f9e1b83aa4ad3887d26a6745681d3214ea Mon Sep 17 00:00:00 2001 From: Chris Date: Tue, 6 Apr 2021 16:27:28 +0100 Subject: Reduce default duration of stream permission from 30m to 5m --- config-default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config-default.yml b/config-default.yml index 4178fba32..9b07d026d 100644 --- a/config-default.yml +++ b/config-default.yml @@ -548,4 +548,4 @@ config: video_permission: - default_permission_duration: 30 # Default duration for stream command in minutes + default_permission_duration: 5 # Default duration for stream command in minutes -- cgit v1.2.3 From 
d14f83a4bc174a9c706552ba9b674cc1d9895efb Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Wed, 7 Apr 2021 03:28:05 +0100 Subject: add custom command checks tag --- bot/resources/tags/customchecks.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 bot/resources/tags/customchecks.md diff --git a/bot/resources/tags/customchecks.md b/bot/resources/tags/customchecks.md new file mode 100644 index 000000000..4f0d62c8d --- /dev/null +++ b/bot/resources/tags/customchecks.md @@ -0,0 +1,21 @@ +**Custom Command Checks in discord.py** + +You may find yourself in need of a decorator to do something that doesn't exist in discord.py by default, but fear not, you can make your own! Using discord.py you can use `discord.ext.commands.check` to create you own decorators like this: +```py +from discord.ext.commands import check, Context + +def in_channel(*channels): + async def predicate(ctx: Context): + return ctx.channel.id in channels + return check(predicate) +``` +There's a fair bit to break down here, so let's start with what we're trying to achieve with this decorator. As you can probably guess from the name it's locking a command to a list of channels. The inner function named `predicate` is used to perform the actual check on the command context. Here you can do anything that requires a `Context` object. This inner function should return `True` if the check is **successful** or `False` if the check **fails**. + +Here's how we might use our new decorator: +```py +@bot.command(name="ping") +@in_channel(728343273562701984) +async def ping(ctx: Context): + ... +``` +This would lock the `ping` command to only be used in the channel `728343273562701984`. 
-- cgit v1.2.3 From 029f4aaeb627326e2b34a1e88b8a3108f5565426 Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Wed, 7 Apr 2021 03:40:04 +0100 Subject: update wording to emphasise checks not decorators --- bot/resources/tags/customchecks.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/resources/tags/customchecks.md b/bot/resources/tags/customchecks.md index 4f0d62c8d..b4eb90872 100644 --- a/bot/resources/tags/customchecks.md +++ b/bot/resources/tags/customchecks.md @@ -1,6 +1,6 @@ **Custom Command Checks in discord.py** -You may find yourself in need of a decorator to do something that doesn't exist in discord.py by default, but fear not, you can make your own! Using discord.py you can use `discord.ext.commands.check` to create you own decorators like this: +You may find yourself in need of a check decorator to do something that doesn't exist in discord.py by default, but fear not, you can make your own! Using discord.py you can use `discord.ext.commands.check` to create you own checks like this: ```py from discord.ext.commands import check, Context @@ -9,9 +9,9 @@ def in_channel(*channels): return ctx.channel.id in channels return check(predicate) ``` -There's a fair bit to break down here, so let's start with what we're trying to achieve with this decorator. As you can probably guess from the name it's locking a command to a list of channels. The inner function named `predicate` is used to perform the actual check on the command context. Here you can do anything that requires a `Context` object. This inner function should return `True` if the check is **successful** or `False` if the check **fails**. +There's a fair bit to break down here, so let's start with what we're trying to achieve with this check. As you can probably guess from the name it's locking a command to a list of channels. The inner function named `predicate` is used to perform the actual check on the command context. 
Here you can do anything that requires a `Context` object. This inner function should return `True` if the check is **successful** or `False` if the check **fails**. -Here's how we might use our new decorator: +Here's how we might use our new check: ```py @bot.command(name="ping") @in_channel(728343273562701984) -- cgit v1.2.3 From fddfc7610a1402afaae3b1f5084b0735fa75afcf Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Wed, 7 Apr 2021 13:27:01 +0100 Subject: rename function to in_any_channel in accordance with d.py naming --- bot/resources/tags/customchecks.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/resources/tags/customchecks.md b/bot/resources/tags/customchecks.md index b4eb90872..96f833430 100644 --- a/bot/resources/tags/customchecks.md +++ b/bot/resources/tags/customchecks.md @@ -4,18 +4,18 @@ You may find yourself in need of a check decorator to do something that doesn't ```py from discord.ext.commands import check, Context -def in_channel(*channels): +def in_any_channel(*channels): async def predicate(ctx: Context): return ctx.channel.id in channels return check(predicate) ``` -There's a fair bit to break down here, so let's start with what we're trying to achieve with this check. As you can probably guess from the name it's locking a command to a list of channels. The inner function named `predicate` is used to perform the actual check on the command context. Here you can do anything that requires a `Context` object. This inner function should return `True` if the check is **successful** or `False` if the check **fails**. +There's a fair bit to break down here, so let's start with what we're trying to achieve with this check. As you can probably guess from the name it's locking a command to a **list of channels**. The inner function named `predicate` is used to perform the actual check on the command context. Here you can do anything that requires a `Context` object. 
This inner function should return `True` if the check is **successful** or `False` if the check **fails**. Here's how we might use our new check: ```py @bot.command(name="ping") -@in_channel(728343273562701984) +@in_any_channel(728343273562701984) async def ping(ctx: Context): ... ``` -This would lock the `ping` command to only be used in the channel `728343273562701984`. +This would lock the `ping` command to only be used in the channel `728343273562701984`. If this check function fails it will raise a `CheckFailure` exception, which can be handled in your error handler. -- cgit v1.2.3 From f66c63b4ae9a219130100b6f01c98e1b079cdf6e Mon Sep 17 00:00:00 2001 From: Ben Soyka Date: Wed, 7 Apr 2021 07:16:48 -0600 Subject: Update YouTube terms in the ytdl tag --- bot/resources/tags/ytdl.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/resources/tags/ytdl.md b/bot/resources/tags/ytdl.md index e34ecff44..2d87f75db 100644 --- a/bot/resources/tags/ytdl.md +++ b/bot/resources/tags/ytdl.md @@ -1,12 +1,12 @@ Per [PyDis' Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, commonly used by Discord bots to stream audio, as its use violates YouTube's Terms of Service. -For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?template=terms), as of 2019-07-22: +For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?template=terms), as of 2021-03-17: ``` -The following restrictions apply to your use of the Service. You are not allowed to: +The following restrictions apply to your use of the Service. You are not allowed to: -1. 
access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as specifically permitted by the Service; (b) with prior written permission from YouTube and, if applicable, the respective rights holders; or (c) as permitted by applicable law; +1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as expressly authorized by the Service; or (b) with prior written permission from YouTube and, if applicable, the respective rights holders; -3. access the Service using any automated means (such as robots, botnets or scrapers) except: (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; (b) with YouTube’s prior written permission; or (c) as permitted by applicable law; +3. access the Service using any automated means (such as robots, botnets or scrapers) except (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; or (b) with YouTube’s prior written permission; 9. 
use the Service to view or listen to Content other than for personal, non-commercial use (for example, you may not publicly screen videos or stream music from the Service) ``` -- cgit v1.2.3 From e77098f5942de41728b95d4580e6189908deb6f9 Mon Sep 17 00:00:00 2001 From: Ben Soyka Date: Wed, 7 Apr 2021 14:23:46 -0600 Subject: Use the UK version of the YouTube TOS --- bot/resources/tags/ytdl.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/resources/tags/ytdl.md b/bot/resources/tags/ytdl.md index 2d87f75db..df28024a0 100644 --- a/bot/resources/tags/ytdl.md +++ b/bot/resources/tags/ytdl.md @@ -1,12 +1,12 @@ Per [PyDis' Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, commonly used by Discord bots to stream audio, as its use violates YouTube's Terms of Service. -For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?template=terms), as of 2021-03-17: +For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?gl=GB&template=terms), as of 2021-03-17: ``` The following restrictions apply to your use of the Service. You are not allowed to: -1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as expressly authorized by the Service; or (b) with prior written permission from YouTube and, if applicable, the respective rights holders; +1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as specifically permitted by the Service; (b) with prior written permission from YouTube and, if applicable, the respective rights holders; or (c) as permitted by applicable law; -3. 
access the Service using any automated means (such as robots, botnets or scrapers) except (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; or (b) with YouTube’s prior written permission; +3. access the Service using any automated means (such as robots, botnets or scrapers) except: (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; (b) with YouTube’s prior written permission; or (c) as permitted by applicable law; 9. use the Service to view or listen to Content other than for personal, non-commercial use (for example, you may not publicly screen videos or stream music from the Service) ``` -- cgit v1.2.3 From b436dcf9aa11d188f5646fe795e428f99be30b6f Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 8 Apr 2021 10:11:42 +0200 Subject: Use 'Similar names' instead of 'moved' in footer The meaning of 'moved' may not have been clear for people that weren't familiar with how the system works Co-authored-by: MarkKoz --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ff67b0e61..7352deb8c 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -284,7 +284,7 @@ class DocCog(commands.Cog): # with a max of 100 chars. 
if symbol_name in self.renamed_symbols: renamed_symbols = ", ".join(self.renamed_symbols[symbol_name]) - footer_text = textwrap.shorten("Moved: " + renamed_symbols, 200, placeholder=" ...") + footer_text = textwrap.shorten("Similar names: " + renamed_symbols, 200, placeholder=" ...") else: footer_text = "" -- cgit v1.2.3 From 417c6d321b0e384fe4c689b931c899f4f043d38e Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 8 Apr 2021 10:14:37 +0200 Subject: update comment --- bot/exts/info/doc/_cog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index 7352deb8c..2a8016fb8 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -281,7 +281,7 @@ class DocCog(commands.Cog): self.bot.stats.incr(f"doc_fetches.{doc_item.package}") # Show all symbols with the same name that were renamed in the footer, - # with a max of 100 chars. + # with a max of 200 chars. if symbol_name in self.renamed_symbols: renamed_symbols = ", ".join(self.renamed_symbols[symbol_name]) footer_text = textwrap.shorten("Similar names: " + renamed_symbols, 200, placeholder=" ...") -- cgit v1.2.3 From f79b7c8668505de059c19820b90e00ae462e3682 Mon Sep 17 00:00:00 2001 From: onerandomusername <71233171+onerandomusername@users.noreply.github.com> Date: Thu, 8 Apr 2021 15:24:46 -0400 Subject: Close voice channels with defcon shutdown --- bot/exts/moderation/defcon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index bab95405c..dfb1afd19 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -181,7 +181,7 @@ class Defcon(Cog): role = ctx.guild.default_role permissions = role.permissions - permissions.update(send_messages=False, add_reactions=False) + permissions.update(send_messages=False, add_reactions=False, connect=False) await role.edit(reason="DEFCON shutdown", 
permissions=permissions) await ctx.send(f"{Action.SERVER_SHUTDOWN.value.emoji} Server shut down.") @@ -192,7 +192,7 @@ class Defcon(Cog): role = ctx.guild.default_role permissions = role.permissions - permissions.update(send_messages=True, add_reactions=True) + permissions.update(send_messages=True, add_reactions=True, connect=True) await role.edit(reason="DEFCON unshutdown", permissions=permissions) await ctx.send(f"{Action.SERVER_OPEN.value.emoji} Server reopened.") -- cgit v1.2.3 From c5806d0a7e682303d80a10955df9ddd64fd2942a Mon Sep 17 00:00:00 2001 From: Xithrius Date: Thu, 8 Apr 2021 19:44:57 -0700 Subject: Added how_to_get_help channel constant. --- bot/constants.py | 1 + config-default.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/bot/constants.py b/bot/constants.py index 547a94a0b..6d14bbb3a 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -412,6 +412,7 @@ class Channels(metaclass=YAMLGetter): python_general: int cooldown: int + how_to_get_help: int attachment_log: int message_log: int diff --git a/config-default.yml b/config-default.yml index 9b07d026d..8c6e18470 100644 --- a/config-default.yml +++ b/config-default.yml @@ -163,6 +163,7 @@ guild: # Python Help: Available cooldown: 720603994149486673 + how_to_get_help: 704250143020417084 # Topical discord_py: 343944376055103488 -- cgit v1.2.3 From 8f1294a133979cf91f953b1987853ab4e695764a Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Fri, 9 Apr 2021 15:08:00 -0700 Subject: Filtering: remove invisible characters before checking filters --- bot/exts/filters/filtering.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index c90b18dcb..1ae2610aa 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -178,6 +178,7 @@ class Filtering(Cog): def get_name_matches(self, name: str) -> List[re.Match]: """Check bad words from passed string (name). 
Return list of matches.""" + name = self.remove_invisible_chars(name) matches = [] watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False) for pattern in watchlist_patterns: @@ -444,6 +445,8 @@ class Filtering(Cog): if SPOILER_RE.search(text): text = self._expand_spoilers(text) + text = self.remove_invisible_chars(text) + # Make sure it's not a URL if URL_RE.search(text): return False, None @@ -462,6 +465,7 @@ class Filtering(Cog): Second return value is a reason of URL blacklisting (can be None). """ + text = self.remove_invisible_chars(text) if not URL_RE.search(text): return False, None @@ -492,6 +496,8 @@ class Filtering(Cog): Attempts to catch some of common ways to try to cheat the system. """ + text = self.remove_invisible_chars(text) + # Remove backslashes to prevent escape character aroundfuckery like # discord\.gg/gdudes-pony-farm text = text.replace("\\", "") @@ -628,6 +634,22 @@ class Filtering(Cog): await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}') log.info(f"Deleted the offensive message with id {msg['id']}.") + @staticmethod + def remove_invisible_chars(string: str) -> str: + """ + Remove invisible characters from `string`. + + Removed characters: + + - mongolian vowel separator + - zero width space + - zero width non-joiner + - zero width joiner + - word joiner + - zero width non-breaking space + """ + return re.sub("[\u180e\u200b\u200c\u200d\u2060\ufeff]", "", string) + def setup(bot: Bot) -> None: """Load the Filtering cog.""" -- cgit v1.2.3 From b38e645a66b76693ebc0cf0febc63187ab7a8b2f Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Fri, 9 Apr 2021 18:52:01 -0700 Subject: AntiSpam: prevent attempts to punish a user multiple times A user may manage to send multiple message that violate filters before the mute is applied. Because of a race condition, subsequent punish attempts did not detect the mute role exists and therefore proceeded to apply another mute. 
To avoid the race condition, abort any subsequent punish attempts while one is already ongoing for a given user. It could be possible to wait instead of abort, but the first attempt failing very likely means subsequent attempts would fail too. Fixes #902 --- bot/exts/filters/antispam.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py index af8528a68..c9052b138 100644 --- a/bot/exts/filters/antispam.py +++ b/bot/exts/filters/antispam.py @@ -3,7 +3,7 @@ import logging from collections.abc import Mapping from dataclasses import dataclass, field from datetime import datetime, timedelta -from operator import itemgetter +from operator import attrgetter, itemgetter from typing import Dict, Iterable, List, Set from discord import Colour, Member, Message, NotFound, Object, TextChannel @@ -18,6 +18,7 @@ from bot.constants import ( ) from bot.converters import Duration from bot.exts.moderation.modlog import ModLog +from bot.utils import lock from bot.utils.messages import format_user, send_attachments @@ -211,6 +212,7 @@ class AntiSpam(Cog): await self.maybe_delete_messages(channel, relevant_messages) break + @lock.lock_arg("antispam.punish", "member", attrgetter("id")) async def punish(self, msg: Message, member: Member, reason: str) -> None: """Punishes the given member for triggering an antispam rule.""" if not any(role.id == self.muted_role.id for role in member.roles): -- cgit v1.2.3 From 73b49b5b4d8f545da4d42b644907a34826757b3e Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Fri, 9 Apr 2021 18:59:49 -0700 Subject: AntiSpam: create tasks in a safer manner Name the tasks and use `scheduling.create_task` to ensure exceptions are caught. 
--- bot/exts/filters/antispam.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py index c9052b138..7555e25a2 100644 --- a/bot/exts/filters/antispam.py +++ b/bot/exts/filters/antispam.py @@ -18,7 +18,7 @@ from bot.constants import ( ) from bot.converters import Duration from bot.exts.moderation.modlog import ModLog -from bot.utils import lock +from bot.utils import lock, scheduling from bot.utils.messages import format_user, send_attachments @@ -115,7 +115,7 @@ class AntiSpam(Cog): self.message_deletion_queue = dict() - self.bot.loop.create_task(self.alert_on_validation_error()) + self.bot.loop.create_task(self.alert_on_validation_error(), name="AntiSpam.alert_on_validation_error") @property def mod_log(self) -> ModLog: @@ -192,7 +192,10 @@ class AntiSpam(Cog): if channel.id not in self.message_deletion_queue: log.trace(f"Creating queue for channel `{channel.id}`") self.message_deletion_queue[message.channel.id] = DeletionContext(channel) - self.bot.loop.create_task(self._process_deletion_context(message.channel.id)) + scheduling.create_task( + self._process_deletion_context(message.channel.id), + name=f"AntiSpam._process_deletion_context({message.channel.id})" + ) # Add the relevant of this trigger to the Deletion Context await self.message_deletion_queue[message.channel.id].add( @@ -202,11 +205,9 @@ class AntiSpam(Cog): ) for member in members: - - # Fire it off as a background task to ensure - # that the sleep doesn't block further tasks - self.bot.loop.create_task( - self.punish(message, member, full_reason) + scheduling.create_task( + self.punish(message, member, full_reason), + name=f"AntiSpam.punish(message={message.id}, member={member.id}, rule={rule_name})" ) await self.maybe_delete_messages(channel, relevant_messages) -- cgit v1.2.3 From e97c8cd0e737413fe6086899d0d7f7459ccadea1 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 6 Apr 2021 15:59:43 +0200 
Subject: Recruitment: Don't use emoji literals --- bot/exts/recruitment/talentpool/_cog.py | 6 +++--- bot/exts/recruitment/talentpool/_review.py | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index a49543806..72604be51 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -10,7 +10,7 @@ from discord.ext.commands import Cog, Context, group, has_any_role from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks +from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks from bot.converters import FetchedMember from bot.exts.moderation.watchchannels._watchchannel import WatchChannel from bot.exts.recruitment.talentpool._review import Reviewer @@ -332,7 +332,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): """Mark a user's nomination as reviewed and cancel the review task.""" if not await self.reviewer.mark_reviewed(ctx, user_id): return - await ctx.send(f"✅ The user with ID `{user_id}` was marked as reviewed.") + await ctx.send(f"{Emojis.check_mark} The user with ID `{user_id}` was marked as reviewed.") @nomination_group.command(aliases=('gr',)) @has_any_role(*MODERATION_ROLES) @@ -353,7 +353,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): return await self.reviewer.post_review(user_id, update_database=False) - await ctx.message.add_reaction("✅") + await ctx.message.add_reaction(Emojis.check_mark) @Cog.listener() async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 55b162cf0..11aa3b62b 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -77,7 +77,7 @@ class Reviewer: 
log.trace(f"Posting the review of {user_id}") message = (await self._bulk_send(channel, review))[-1] if seen_emoji: - for reaction in (seen_emoji, "👍", "👎"): + for reaction in (seen_emoji, "\N{THUMBS UP SIGN}", "\N{THUMBS DOWN SIGN}"): await message.add_reaction(reaction) if update_database: @@ -98,7 +98,7 @@ class Reviewer: if not member: return ( - f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server 😔" + f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server :pensive:" ), None opening = f"<@&{Roles.moderators}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!" @@ -267,10 +267,10 @@ class Reviewer: @staticmethod def _random_ducky(guild: Guild) -> Union[Emoji, str]: - """Picks a random ducky emoji to be used to mark the vote as seen. If no duckies found returns 👀.""" + """Picks a random ducky emoji to be used to mark the vote as seen. If no duckies found returns :eyes:.""" duckies = [emoji for emoji in guild.emojis if emoji.name.startswith("ducky")] if not duckies: - return "👀" + return ":eyes:" return random.choice(duckies) @staticmethod @@ -300,12 +300,12 @@ class Reviewer: await self._pool.fetch_user_cache() if user_id not in self._pool.watched_users: log.trace(f"Can't find a nominated user with id {user_id}") - await ctx.send(f"❌ Can't find a currently nominated user with id `{user_id}`") + await ctx.send(f":x: Can't find a currently nominated user with id `{user_id}`") return False nomination = self._pool.watched_users[user_id] if nomination["reviewed"]: - await ctx.send("❌ This nomination was already reviewed, but here's a cookie 🍪") + await ctx.send(":x: This nomination was already reviewed, but here's a cookie :cookie:") return False await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) -- cgit v1.2.3 From 150713eaf7b5c61667385f3e46587aca059191f4 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 10 Apr 2021 13:10:51 
-0700 Subject: Filtering: use a more thorough regex for zalgo & invisible chars Install the regex package to take advantage of its support for Unicode categories. --- Pipfile | 1 + Pipfile.lock | 89 +++++++++++++++++++++++++++++++++++-------- bot/exts/filters/filtering.py | 36 +++++++++-------- 3 files changed, 91 insertions(+), 35 deletions(-) diff --git a/Pipfile b/Pipfile index 7fab198f3..2ac5645dd 100644 --- a/Pipfile +++ b/Pipfile @@ -25,6 +25,7 @@ more_itertools = "~=8.2" python-dateutil = "~=2.8" python-frontmatter = "~=1.0.0" pyyaml = "~=5.1" +regex = "==2021.4.4" requests = "~=2.22" sentry-sdk = "~=0.19" sphinx = "~=2.2" diff --git a/Pipfile.lock b/Pipfile.lock index cbec48ef0..d6792ac35 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "91b5639198b35740611e7ac923cfc262e5897b8cbc3ca243dc98335705804ba7" + "sha256": "fc3421fc4c95d73b620f2b8b0a7dea288d4fc559e0d288ed4ad6cf4eb312f630" }, "pipfile-spec": 6, "requires": { @@ -221,6 +221,7 @@ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], + "index": "pypi", "markers": "sys_platform == 'win32'", "version": "==0.4.4" }, @@ -250,11 +251,11 @@ }, "docutils": { "hashes": [ - "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", - "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" + "sha256:a71042bb7207c03d5647f280427f14bfbd1a65c9eb84f4b341d85fafb6bb4bdf", + "sha256:e2ffeea817964356ba4470efba7c2f42b6b0de0b04e66378507e3e2504bbff4c" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.16" + "version": "==0.17" }, "emoji": { "hashes": [ @@ -605,6 +606,15 @@ "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, + "pyreadline": { + "hashes": [ + "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1", + 
"sha256:65540c21bfe14405a3a77e4c085ecfce88724743a4ead47c66b84defcf82c32e", + "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b" + ], + "markers": "sys_platform == 'win32'", + "version": "==2.1" + }, "python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", @@ -671,6 +681,53 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, + "regex": { + "hashes": [ + "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5", + "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79", + "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31", + "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500", + "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11", + "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14", + "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3", + "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439", + "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c", + "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82", + "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711", + "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093", + "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a", + "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb", + "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8", + "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17", + "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000", + "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d", + "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480", + 
"sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc", + "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0", + "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9", + "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765", + "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e", + "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a", + "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07", + "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f", + "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac", + "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7", + "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed", + "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968", + "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7", + "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2", + "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4", + "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87", + "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8", + "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10", + "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29", + "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605", + "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6", + "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042" + ], + "index": "pypi", + "version": "==2021.4.4" + }, "requests": { "hashes": [ "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", @@ -976,11 +1033,11 @@ }, "flake8-annotations": { "hashes": [ - "sha256:40a4d504cdf64126ea0bdca39edab1608bc6d515e96569b7e7c3c59c84f66c36", - 
"sha256:eabbfb2dd59ae0e9835f509f930e79cd99fa4ff1026fe6ca073503a57407037c" + "sha256:0d6cd2e770b5095f09689c9d84cc054c51b929c41a68969ea1beb4b825cac515", + "sha256:d10c4638231f8a50c0a597c4efce42bd7b7d85df4f620a0ddaca526138936a4f" ], "index": "pypi", - "version": "==2.6.1" + "version": "==2.6.2" }, "flake8-bugbear": { "hashes": [ @@ -1038,11 +1095,11 @@ }, "identify": { "hashes": [ - "sha256:43cb1965e84cdd247e875dec6d13332ef5be355ddc16776396d98089b9053d87", - "sha256:c7c0f590526008911ccc5ceee6ed7b085cbc92f7b6591d0ee5913a130ad64034" + "sha256:398cb92a7599da0b433c65301a1b62b9b1f4bb8248719b84736af6c0b22289d6", + "sha256:4537474817e0bbb8cea3e5b7504b7de6d44e3f169a90846cbc6adb0fc8294502" ], "markers": "python_full_version >= '3.6.1'", - "version": "==2.2.2" + "version": "==2.2.3" }, "idna": { "hashes": [ @@ -1061,10 +1118,10 @@ }, "nodeenv": { "hashes": [ - "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9", - "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c" + "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b", + "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7" ], - "version": "==1.5.0" + "version": "==1.6.0" }, "pep8-naming": { "hashes": [ @@ -1076,11 +1133,11 @@ }, "pre-commit": { "hashes": [ - "sha256:94c82f1bf5899d56edb1d926732f4e75a7df29a0c8c092559c77420c9d62428b", - "sha256:de55c5c72ce80d79106e48beb1b54104d16495ce7f95b0c7b13d4784193a00af" + "sha256:029d53cb83c241fe7d66eeee1e24db426f42c858f15a38d20bcefd8d8e05c9da", + "sha256:46b6ffbab37986c47d0a35e40906ae029376deed89a0eb2e446fb6e67b220427" ], "index": "pypi", - "version": "==2.11.1" + "version": "==2.12.0" }, "pycodestyle": { "hashes": [ diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 1ae2610aa..464732453 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -6,6 +6,7 @@ from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union import dateutil 
import discord.errors +import regex from async_rediscache import RedisCache from dateutil.relativedelta import relativedelta from discord import Colour, HTTPException, Member, Message, NotFound, TextChannel @@ -34,7 +35,11 @@ CODE_BLOCK_RE = re.compile( EVERYONE_PING_RE = re.compile(rf"@everyone|<@&{Guild.id}>|@here") SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL) URL_RE = re.compile(r"(https?://[^\s]+)", flags=re.IGNORECASE) -ZALGO_RE = re.compile(r"[\u0300-\u036F\u0489]") + +# Exclude variation selectors from zalgo because they're actually invisible. +VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF" +INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1) +ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1) # Other constants. DAYS_BETWEEN_ALERTS = 3 @@ -178,7 +183,7 @@ class Filtering(Cog): def get_name_matches(self, name: str) -> List[re.Match]: """Check bad words from passed string (name). Return list of matches.""" - name = self.remove_invisible_chars(name) + name = self.clean_input(name) matches = [] watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False) for pattern in watchlist_patterns: @@ -445,7 +450,7 @@ class Filtering(Cog): if SPOILER_RE.search(text): text = self._expand_spoilers(text) - text = self.remove_invisible_chars(text) + text = self.clean_input(text) # Make sure it's not a URL if URL_RE.search(text): @@ -465,7 +470,7 @@ class Filtering(Cog): Second return value is a reason of URL blacklisting (can be None). """ - text = self.remove_invisible_chars(text) + text = self.clean_input(text) if not URL_RE.search(text): return False, None @@ -496,7 +501,7 @@ class Filtering(Cog): Attempts to catch some of common ways to try to cheat the system. 
""" - text = self.remove_invisible_chars(text) + text = self.clean_input(text) # Remove backslashes to prevent escape character aroundfuckery like # discord\.gg/gdudes-pony-farm @@ -635,20 +640,13 @@ class Filtering(Cog): log.info(f"Deleted the offensive message with id {msg['id']}.") @staticmethod - def remove_invisible_chars(string: str) -> str: - """ - Remove invisible characters from `string`. - - Removed characters: - - - mongolian vowel separator - - zero width space - - zero width non-joiner - - zero width joiner - - word joiner - - zero width non-breaking space - """ - return re.sub("[\u180e\u200b\u200c\u200d\u2060\ufeff]", "", string) + def clean_input(string: str) -> str: + """Remove zalgo and invisible characters from `string`.""" + # For future consideration: remove characters in the Mc, Sk, and Lm categories too. + # Can be normalised with form C to merge char + combining char into a single char to avoid + # removing legit diacritics, but this would open up a way to bypass filters. 
+ no_zalgo = ZALGO_RE.sub("", string) + return INVISIBLE_RE.sub("", no_zalgo) def setup(bot: Bot) -> None: -- cgit v1.2.3 From 3dd248c83792b423673c846c658e0e660b662f56 Mon Sep 17 00:00:00 2001 From: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> Date: Sun, 11 Apr 2021 15:31:05 -0400 Subject: Sort the available help channels in #how-to-get-help by position --- bot/exts/help_channels/_cog.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index c18fcf875..262b18e16 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -500,7 +500,9 @@ class HelpChannels(commands.Cog): ) available_channels = AVAILABLE_HELP_CHANNELS.format( - available=', '.join(c.mention for c in self.available_help_channels) or None + available=", ".join( + c.mention for c in sorted(self.available_help_channels, key=attrgetter("position")) + ) or None ) if self.dynamic_message is not None: -- cgit v1.2.3 From f27ea27c039ff1b28436320c10e3c2fc4f6c06f6 Mon Sep 17 00:00:00 2001 From: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> Date: Mon, 12 Apr 2021 11:53:10 -0400 Subject: Use == instead of str.startswith --- bot/exts/info/source.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py index 49e74f204..dc452d21d 100644 --- a/bot/exts/info/source.py +++ b/bot/exts/info/source.py @@ -14,9 +14,10 @@ SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, str, co class SourceConverter(commands.Converter): """Convert an argument into a help command, tag, command, or cog.""" - async def convert(self, ctx: commands.Context, argument: str) -> SourceType: + @staticmethod + async def convert(ctx: commands.Context, argument: str) -> SourceType: """Convert argument into source object.""" - if argument.lower().startswith("help"): + if argument.lower() == "help": return ctx.bot.help_command cog = 
ctx.bot.get_cog(argument) -- cgit v1.2.3 From fcfa287e96bf0eedcb2fe0bc2e004794324beeb2 Mon Sep 17 00:00:00 2001 From: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> Date: Mon, 12 Apr 2021 11:54:13 -0400 Subject: Remove reactions from everyone when paginating and waiting for trashcan reaction. (#1471) * Remove reactions from everyone * Make flake8 happy * Make flake8 happy again * Remove reactions in check functions * Make flake8 happy for the last time * Update bot/pagination.py Co-authored-by: Kieran Siek * Make create_task one line and return False in checks * Fix return so it returns either True or False * Use scheduling.create_task and suppress HTTPException * Suppress HTTPException in scheduling.create_task * Remove double if-statements Co-authored-by: Mark * change suppress_exceptions to suppressed_exceptions * Make suppressed_exceptions a kwargs for _log_task_exception * Update scheduling.create_task call to correspond with *args * Fix NameError: reaction, user -> reaction_, user_ * Update scheduling.create_task call to correspond with *args in messages.wait_for_deletion * reaction -> reaction_ * Ignore reactions from the bot * Fix type annotations for create_task * Refactor add_reaction check to a separate function * Name the remove_reaction task Co-authored-by: Kieran Siek Co-authored-by: Mark --- bot/pagination.py | 36 ++++++++------------------- bot/utils/messages.py | 66 +++++++++++++++++++++++++++++++++++++++---------- bot/utils/scheduling.py | 10 ++++---- 3 files changed, 68 insertions(+), 44 deletions(-) diff --git a/bot/pagination.py b/bot/pagination.py index 3b16cc9ff..c5c84afd9 100644 --- a/bot/pagination.py +++ b/bot/pagination.py @@ -2,14 +2,14 @@ import asyncio import logging import typing as t from contextlib import suppress +from functools import partial import discord -from discord import Member from discord.abc import User from discord.ext.commands import Context, Paginator from bot import constants -from bot.constants import 
MODERATION_ROLES +from bot.utils import messages FIRST_EMOJI = "\u23EE" # [:track_previous:] LEFT_EMOJI = "\u2B05" # [:arrow_left:] @@ -220,29 +220,6 @@ class LinePaginator(Paginator): >>> embed.set_author(name="Some Operation", url=url, icon_url=icon) >>> await LinePaginator.paginate([line for line in lines], ctx, embed) """ - def event_check(reaction_: discord.Reaction, user_: discord.Member) -> bool: - """Make sure that this reaction is what we want to operate on.""" - no_restrictions = ( - # The reaction was by a whitelisted user - user_.id == restrict_to_user.id - # The reaction was by a moderator - or isinstance(user_, Member) and any(role.id in MODERATION_ROLES for role in user_.roles) - ) - - return ( - # Conditions for a successful pagination: - all(( - # Reaction is on this message - reaction_.message.id == message.id, - # Reaction is one of the pagination emotes - str(reaction_.emoji) in PAGINATION_EMOJI, - # Reaction was not made by the Bot - user_.id != ctx.bot.user.id, - # There were no restrictions - no_restrictions - )) - ) - paginator = cls(prefix=prefix, suffix=suffix, max_size=max_size, max_lines=max_lines, scale_to_size=scale_to_size) current_page = 0 @@ -303,9 +280,16 @@ class LinePaginator(Paginator): log.trace(f"Adding reaction: {repr(emoji)}") await message.add_reaction(emoji) + check = partial( + messages.reaction_check, + message_id=message.id, + allowed_emoji=PAGINATION_EMOJI, + allowed_users=(restrict_to_user.id,), + ) + while True: try: - reaction, user = await ctx.bot.wait_for("reaction_add", timeout=timeout, check=event_check) + reaction, user = await ctx.bot.wait_for("reaction_add", timeout=timeout, check=check) log.trace(f"Got reaction: {reaction}") except asyncio.TimeoutError: log.debug("Timed out waiting for a reaction") diff --git a/bot/utils/messages.py b/bot/utils/messages.py index 0bcaed43d..2beead6af 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -3,6 +3,7 @@ import contextlib import logging import random 
import re +from functools import partial from io import BytesIO from typing import List, Optional, Sequence, Union @@ -12,24 +13,66 @@ from discord.ext.commands import Context import bot from bot.constants import Emojis, MODERATION_ROLES, NEGATIVE_REPLIES +from bot.utils import scheduling log = logging.getLogger(__name__) +def reaction_check( + reaction: discord.Reaction, + user: discord.abc.User, + *, + message_id: int, + allowed_emoji: Sequence[str], + allowed_users: Sequence[int], + allow_mods: bool = True, +) -> bool: + """ + Check if a reaction's emoji and author are allowed and the message is `message_id`. + + If the user is not allowed, remove the reaction. Ignore reactions made by the bot. + If `allow_mods` is True, allow users with moderator roles even if they're not in `allowed_users`. + """ + right_reaction = ( + user != bot.instance.user + and reaction.message.id == message_id + and str(reaction.emoji) in allowed_emoji + ) + if not right_reaction: + return False + + is_moderator = ( + allow_mods + and any(role.id in MODERATION_ROLES for role in getattr(user, "roles", [])) + ) + + if user.id in allowed_users or is_moderator: + log.trace(f"Allowed reaction {reaction} by {user} on {reaction.message.id}.") + return True + else: + log.trace(f"Removing reaction {reaction} by {user} on {reaction.message.id}: disallowed user.") + scheduling.create_task( + reaction.message.remove_reaction(reaction.emoji, user), + HTTPException, # Suppress the HTTPException if adding the reaction fails + name=f"remove_reaction-{reaction}-{reaction.message.id}-{user}" + ) + return False + + async def wait_for_deletion( message: discord.Message, - user_ids: Sequence[discord.abc.Snowflake], + user_ids: Sequence[int], deletion_emojis: Sequence[str] = (Emojis.trashcan,), timeout: float = 60 * 5, attach_emojis: bool = True, - allow_moderation_roles: bool = True + allow_mods: bool = True ) -> None: """ Wait for up to `timeout` seconds for a reaction by any of the specified `user_ids` to 
delete the message. An `attach_emojis` bool may be specified to determine whether to attach the given `deletion_emojis` to the message in the given `context`. - An `allow_moderation_roles` bool may also be specified to allow anyone with a role in `MODERATION_ROLES` to delete + An `allow_mods` bool may also be specified to allow anyone with a role in `MODERATION_ROLES` to delete the message. """ if message.guild is None: @@ -43,16 +86,13 @@ async def wait_for_deletion( log.trace(f"Aborting wait_for_deletion: message {message.id} deleted prematurely.") return - def check(reaction: discord.Reaction, user: discord.Member) -> bool: - """Check that the deletion emoji is reacted by the appropriate user.""" - return ( - reaction.message.id == message.id - and str(reaction.emoji) in deletion_emojis - and ( - user.id in user_ids - or allow_moderation_roles and any(role.id in MODERATION_ROLES for role in user.roles) - ) - ) + check = partial( + reaction_check, + message_id=message.id, + allowed_emoji=deletion_emojis, + allowed_users=user_ids, + allow_mods=allow_mods, + ) with contextlib.suppress(asyncio.TimeoutError): await bot.instance.wait_for('reaction_add', check=check, timeout=timeout) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 6843bae88..2dc485f24 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -161,18 +161,18 @@ class Scheduler: self._log.error(f"Error in task #{task_id} {id(done_task)}!", exc_info=exception) -def create_task(*args, **kwargs) -> asyncio.Task: +def create_task(coro: t.Awaitable, *suppressed_exceptions: t.Type[Exception], **kwargs) -> asyncio.Task: """Wrapper for `asyncio.create_task` which logs exceptions raised in the task.""" - task = asyncio.create_task(*args, **kwargs) - task.add_done_callback(_log_task_exception) + task = asyncio.create_task(coro, **kwargs) + task.add_done_callback(partial(_log_task_exception, suppressed_exceptions=suppressed_exceptions)) return task -def _log_task_exception(task: 
asyncio.Task) -> None: +def _log_task_exception(task: asyncio.Task, *, suppressed_exceptions: t.Tuple[t.Type[Exception]]) -> None: """Retrieve and log the exception raised in `task` if one exists.""" with contextlib.suppress(asyncio.CancelledError): exception = task.exception() # Log the exception if one exists. - if exception: + if exception and not isinstance(exception, suppressed_exceptions): log = logging.getLogger(__name__) log.error(f"Error in task {task.get_name()} {id(task)}!", exc_info=exception) -- cgit v1.2.3 From 9ab05cbe3f23d442b5bc73311e0c3e8b075e396e Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 13 Apr 2021 19:05:45 +0200 Subject: Branding: use tz-aware datetime representation Using `datetime.utcnow` produces a tz-naive object. When converting the object into a POSIX timestamp (L212), the library then converts the naive object into UTC, which will offset it unless the local timezone is UTC. We prevent this behaviour by using an Arrow repr instead, which is by default tz-aware. Since the object already knows it is in UTC, it does not shift when converting to a timestamp. Because L233 used `fromtimestamp` rather than `utcfromtimestamp`, the timestamp then got converted back into local time, canceling the previous error. Therefore, the bug wasn't observable from logs, as the times looked correct, but were being stored incorrectly. By using `Arrow.utcfromtimestamp`, the created object will be aware of being UTC again, which is more safe. 
--- bot/exts/backend/branding/_cog.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 0a4ddcc88..fdc4a4167 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -3,12 +3,13 @@ import contextlib import logging import random import typing as t -from datetime import datetime, time, timedelta +from datetime import timedelta from enum import Enum from operator import attrgetter import async_timeout import discord +from arrow import Arrow from async_rediscache import RedisCache from discord.ext import commands, tasks @@ -208,7 +209,7 @@ class Branding(commands.Cog): if success: await self.cache_icons.increment(next_icon) # Push the icon into the next iteration. - timestamp = datetime.utcnow().timestamp() + timestamp = Arrow.utcnow().timestamp() await self.cache_information.set("last_rotation_timestamp", timestamp) return success @@ -229,8 +230,8 @@ class Branding(commands.Cog): await self.rotate_icons() return - last_rotation = datetime.fromtimestamp(last_rotation_timestamp) - difference = (datetime.utcnow() - last_rotation) + timedelta(minutes=5) + last_rotation = Arrow.utcfromtimestamp(last_rotation_timestamp) + difference = (Arrow.utcnow() - last_rotation) + timedelta(minutes=5) log.trace(f"Icons last rotated at {last_rotation} (difference: {difference}).") @@ -485,11 +486,11 @@ class Branding(commands.Cog): await self.daemon_loop() log.trace("Daemon before: calculating time to sleep before loop begins.") - now = datetime.utcnow() + now = Arrow.utcnow() # The actual midnight moment is offset into the future to prevent issues with imprecise sleep. 
- tomorrow = now + timedelta(days=1) - midnight = datetime.combine(tomorrow, time(minute=1)) + tomorrow = now.shift(days=1) + midnight = tomorrow.replace(hour=0, minute=1, second=0, microsecond=0) sleep_secs = (midnight - now).total_seconds() log.trace(f"Daemon before: sleeping {sleep_secs} seconds before next-up midnight: {midnight}.") -- cgit v1.2.3 From d29e98ba6808104d10b519ea6bf062242d682f16 Mon Sep 17 00:00:00 2001 From: kwzrd Date: Tue, 13 Apr 2021 21:56:52 +0200 Subject: Branding: adjust duration string for 1-day events Instead of: 'January 1 - January 1' Do: 'January 1' --- bot/exts/backend/branding/_cog.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index fdc4a4167..47c379a34 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -58,6 +58,8 @@ def extract_event_duration(event: Event) -> str: Extract a human-readable, year-agnostic duration string from `event`. In the case that `event` is a fallback event, resolves to 'Fallback'. + + For 1-day events, only the single date is shown, instead of a period. """ if event.meta.is_fallback: return "Fallback" @@ -66,6 +68,9 @@ def extract_event_duration(event: Event) -> str: start_date = event.meta.start_date.strftime(fmt) end_date = event.meta.end_date.strftime(fmt) + if start_date == end_date: + return start_date + return f"{start_date} - {end_date}" -- cgit v1.2.3 From 475bd2124d56f6a59933b79b2f22c2b6c8896a25 Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 14 Apr 2021 00:19:25 +0100 Subject: Use a paginated embed to output multiple snowflakes Previously each snowflake passed to the command would have their own embed, which may cause the bot to send many embeds if a staff unknowingly passed it a bunch of snowflakes. This change makes sure that we don't run into rate limits on the bot by sending all of the snowflakes in one embed. 
--- bot/exts/utils/utils.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index cae7f2593..60383996d 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -162,17 +162,26 @@ class Utils(Cog): if len(snowflakes) > 1 and await has_no_roles_check(ctx, *STAFF_ROLES): raise BadArgument("Cannot process more than one snowflake in one invocation.") + embed = Embed( + colour=Colour.blue() + ) + embed.set_author( + name=f"Snowflake{'s'[:len(snowflakes)^1]}", # Deals with pluralisation + icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" + ) + + lines = [] for snowflake in snowflakes: created_at = snowflake_time(snowflake) - embed = Embed( - description=f"**Created at {created_at}** ({time_since(created_at, max_units=3)}).", - colour=Colour.blue() - ) - embed.set_author( - name=f"Snowflake: {snowflake}", - icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" - ) - await ctx.send(embed=embed) + lines.append(f"**{snowflake}**\nCreated at {created_at} ({time_since(created_at, max_units=3)}).") + + await LinePaginator.paginate( + lines, + ctx=ctx, + embed=embed, + max_lines=5, + max_size=1000 + ) @command(aliases=("poll",)) @has_any_role(*MODERATION_ROLES, Roles.project_leads, Roles.domain_leads) -- cgit v1.2.3 From f9fb8631ce8568e0c9f15ea4ff0977e722ede3ba Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 14 Apr 2021 10:23:38 +0100 Subject: Require at least one snowflake to be provided. 
--- bot/exts/utils/utils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index 60383996d..0fe0cab78 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -162,6 +162,9 @@ class Utils(Cog): if len(snowflakes) > 1 and await has_no_roles_check(ctx, *STAFF_ROLES): raise BadArgument("Cannot process more than one snowflake in one invocation.") + if not snowflakes: + raise BadArgument("At least one snowflake must be provided.") + embed = Embed( colour=Colour.blue() ) -- cgit v1.2.3 From 347927cc6d86e852959db716b6de31c6a886640d Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 14 Apr 2021 16:54:27 +0100 Subject: Catch NotFound errors when trying to delete the invocation message when cleaning This often happens during a raid, when an int e script is added to ban & clean messages. Since the invocation message will be deleted on the first run, we should except subsequent NotFound errors. --- bot/exts/utils/clean.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py index 8acaf9131..cb662e852 100644 --- a/bot/exts/utils/clean.py +++ b/bot/exts/utils/clean.py @@ -3,7 +3,7 @@ import random import re from typing import Iterable, Optional -from discord import Colour, Embed, Message, TextChannel, User +from discord import Colour, Embed, Message, TextChannel, User, errors from discord.ext import commands from discord.ext.commands import Cog, Context, group, has_any_role @@ -115,7 +115,11 @@ class Clean(Cog): # Delete the invocation first self.mod_log.ignore(Event.message_delete, ctx.message.id) - await ctx.message.delete() + try: + await ctx.message.delete() + except errors.NotFound: + # Invocation message has already been deleted + log.info("Tried to delete invocation message, but it was already deleted.") messages = [] message_ids = [] -- cgit v1.2.3 From 56ff78ae70986dfbc01878e666a6aaf753739668 Mon Sep 17 00:00:00 2001 From: Chris Date: 
Wed, 14 Apr 2021 17:48:49 +0100 Subject: Refactor embed to use just one line --- bot/exts/utils/utils.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index 0fe0cab78..8d9d27c64 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -165,9 +165,7 @@ class Utils(Cog): if not snowflakes: raise BadArgument("At least one snowflake must be provided.") - embed = Embed( - colour=Colour.blue() - ) + embed = Embed(colour=Colour.blue()) embed.set_author( name=f"Snowflake{'s'[:len(snowflakes)^1]}", # Deals with pluralisation icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" -- cgit v1.2.3 From e06f496a6e3f9a9d6cfaeb3902547aa9da1dd7c1 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 14 Apr 2021 20:05:04 +0300 Subject: Add Duty cog and new Moderators role Added a cog to allow moderators to go off and on duty. The off-duty state is cached via a redis cache, and its expiry is scheduled via the Scheduler. Additionally changes which roles are pinged on mod alerts. 
--- bot/constants.py | 1 + bot/exts/moderation/duty.py | 135 ++++++++++++++++++++++++++++++++++++++++++ bot/exts/moderation/modlog.py | 6 +- config-default.yml | 5 +- 4 files changed, 143 insertions(+), 4 deletions(-) create mode 100644 bot/exts/moderation/duty.py diff --git a/bot/constants.py b/bot/constants.py index 6d14bbb3a..cc3aa41a5 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -491,6 +491,7 @@ class Roles(metaclass=YAMLGetter): domain_leads: int helpers: int moderators: int + mod_team: int owners: int project_leads: int diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py new file mode 100644 index 000000000..13be016f2 --- /dev/null +++ b/bot/exts/moderation/duty.py @@ -0,0 +1,135 @@ +import datetime +import logging + +from async_rediscache import RedisCache +from dateutil.parser import isoparse +from discord import Member +from discord.ext.commands import Cog, Context, group, has_any_role + +from bot.bot import Bot +from bot.constants import Emojis, Guild, MODERATION_ROLES, Roles +from bot.converters import Expiry +from bot.utils.scheduling import Scheduler + + +log = logging.getLogger(__name__) + + +class Duty(Cog): + """Commands for a moderator to go on and off duty.""" + + # RedisCache[str, str] + # The cache's keys are mods who are off-duty. + # The cache's values are the times when the role should be re-applied to them, stored in ISO format. 
+ off_duty_mods = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self._role_scheduler = Scheduler(self.__class__.__name__) + + self.guild = None + self.moderators_role = None + + self.bot.loop.create_task(self.reschedule_roles()) + + async def reschedule_roles(self) -> None: + """Reschedule moderators role re-apply times.""" + await self.bot.wait_until_guild_available() + self.guild = self.bot.get_guild(Guild.id) + self.moderators_role = self.guild.get_role(Roles.moderators) + + mod_team = self.guild.get_role(Roles.mod_team) + on_duty = self.moderators_role.members + off_duty = await self.off_duty_mods.to_dict() + + log.trace("Applying the moderators role to the mod team where necessary.") + for mod in mod_team.members: + if mod in on_duty: # Make sure that on-duty mods aren't in the cache. + if mod in off_duty: + await self.off_duty_mods.delete(mod.id) + continue + + # Keep the role off only for those in the cache. + if mod.id not in off_duty: + await self.reapply_role(mod) + else: + expiry = isoparse(off_duty[mod.id]).replace(tzinfo=None) + self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod)) + + async def reapply_role(self, mod: Member) -> None: + """Reapply the moderator's role to the given moderator.""" + log.trace(f"Re-applying role to mod with ID {mod.id}.") + await mod.add_roles(self.moderators_role, reason="Off-duty period expired.") + + @group(name='duty', invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) + async def duty_group(self, ctx: Context) -> None: + """Allow the removal and re-addition of the pingable moderators role.""" + await ctx.send_help(ctx.command) + + @duty_group.command(name='off') + @has_any_role(*MODERATION_ROLES) + async def off_command(self, ctx: Context, duration: Expiry) -> None: + """ + Temporarily removes the pingable moderators role for a set amount of time. + + A unit of time should be appended to the duration. 
+ Units (∗case-sensitive): + \u2003`y` - years + \u2003`m` - months∗ + \u2003`w` - weeks + \u2003`d` - days + \u2003`h` - hours + \u2003`M` - minutes∗ + \u2003`s` - seconds + + Alternatively, an ISO 8601 timestamp can be provided for the duration. + + The duration cannot be longer than 30 days. + """ + duration: datetime.datetime + delta = duration - datetime.datetime.utcnow() + if delta > datetime.timedelta(days=30): + await ctx.send(":x: Cannot remove the role for longer than 30 days.") + return + + mod = ctx.author + + await mod.remove_roles(self.moderators_role, reason="Entered off-duty period.") + + await self.off_duty_mods.update({mod.id: duration.isoformat()}) + + if mod.id in self._role_scheduler: + self._role_scheduler.cancel(mod.id) + self._role_scheduler.schedule_at(duration, mod.id, self.reapply_role(mod)) + + until_date = duration.replace(microsecond=0).isoformat() + await ctx.send(f"{Emojis.check_mark} Moderators role has been removed until {until_date}.") + + @duty_group.command(name='on') + @has_any_role(*MODERATION_ROLES) + async def on_command(self, ctx: Context) -> None: + """Re-apply the pingable moderators role.""" + mod = ctx.author + if mod in self.moderators_role.members: + await ctx.send(":question: You already have the role.") + return + + await mod.add_roles(self.moderators_role, reason="Off-duty period canceled.") + + await self.off_duty_mods.delete(mod.id) + + if mod.id in self._role_scheduler: + self._role_scheduler.cancel(mod.id) + + await ctx.send(f"{Emojis.check_mark} Moderators role has been re-applied.") + + def cog_unload(self) -> None: + """Cancel role tasks when the cog unloads.""" + log.trace("Cog unload: canceling role tasks.") + self._role_scheduler.cancel_all() + + +def setup(bot: Bot) -> None: + """Load the Slowmode cog.""" + bot.add_cog(Duty(bot)) diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index 2dae9d268..f68a1880e 100644 --- a/bot/exts/moderation/modlog.py +++ 
b/bot/exts/moderation/modlog.py @@ -14,7 +14,7 @@ from discord.abc import GuildChannel from discord.ext.commands import Cog, Context from bot.bot import Bot -from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, URLs +from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles, URLs from bot.utils.messages import format_user from bot.utils.time import humanize_delta @@ -115,9 +115,9 @@ class ModLog(Cog, name="ModLog"): if ping_everyone: if content: - content = f"@everyone\n{content}" + content = f"<@&{Roles.moderators}> @here\n{content}" else: - content = "@everyone" + content = f"<@&{Roles.moderators}> @here" # Truncate content to 2000 characters and append an ellipsis. if content and len(content) > 2000: diff --git a/config-default.yml b/config-default.yml index 8c6e18470..6eb954cd5 100644 --- a/config-default.yml +++ b/config-default.yml @@ -260,7 +260,8 @@ guild: devops: 409416496733880320 domain_leads: 807415650778742785 helpers: &HELPERS_ROLE 267630620367257601 - moderators: &MODS_ROLE 267629731250176001 + moderators: &MODS_ROLE 831776746206265384 + mod_team: &MOD_TEAM_ROLE 267629731250176001 owners: &OWNERS_ROLE 267627879762755584 project_leads: 815701647526330398 @@ -274,12 +275,14 @@ guild: moderation_roles: - *ADMINS_ROLE - *MODS_ROLE + - *MOD_TEAM_ROLE - *OWNERS_ROLE staff_roles: - *ADMINS_ROLE - *HELPERS_ROLE - *MODS_ROLE + - *MOD_TEAM_ROLE - *OWNERS_ROLE webhooks: -- cgit v1.2.3 From 65df8e24874cda7b9525acde346199f66e59650f Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Thu, 15 Apr 2021 00:55:29 +0300 Subject: Remove extra newline Co-authored-by: ks129 <45097959+ks129@users.noreply.github.com> --- bot/exts/moderation/duty.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index 13be016f2..94eed9331 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ 
-11,7 +11,6 @@ from bot.constants import Emojis, Guild, MODERATION_ROLES, Roles from bot.converters import Expiry from bot.utils.scheduling import Scheduler - log = logging.getLogger(__name__) -- cgit v1.2.3 From 38714aef8c5b71c5e8313a82bef18947f1f1395a Mon Sep 17 00:00:00 2001 From: mbaruh Date: Thu, 15 Apr 2021 01:00:31 +0300 Subject: Fix setup docstring to specify correct cog --- bot/exts/moderation/duty.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index 94eed9331..3f34e366c 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ -130,5 +130,5 @@ class Duty(Cog): def setup(bot: Bot) -> None: - """Load the Slowmode cog.""" + """Load the Duty cog.""" bot.add_cog(Duty(bot)) -- cgit v1.2.3 From 6c00f74c8dcd2f3f1aaa4eff89e72cc135b75357 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Thu, 15 Apr 2021 01:04:16 +0300 Subject: Add off-duty expiration date to audit log --- bot/exts/moderation/duty.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index 3f34e366c..265261be8 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ -94,7 +94,8 @@ class Duty(Cog): mod = ctx.author - await mod.remove_roles(self.moderators_role, reason="Entered off-duty period.") + until_date = duration.replace(microsecond=0).isoformat() + await mod.remove_roles(self.moderators_role, reason=f"Entered off-duty period until {until_date}.") await self.off_duty_mods.update({mod.id: duration.isoformat()}) @@ -102,7 +103,6 @@ class Duty(Cog): self._role_scheduler.cancel(mod.id) self._role_scheduler.schedule_at(duration, mod.id, self.reapply_role(mod)) - until_date = duration.replace(microsecond=0).isoformat() await ctx.send(f"{Emojis.check_mark} Moderators role has been removed until {until_date}.") @duty_group.command(name='on') -- cgit v1.2.3 From b5fbca6f32c437aa45e28916451de39fb1485a75 Mon Sep 17 
00:00:00 2001 From: mbaruh Date: Thu, 15 Apr 2021 01:10:37 +0300 Subject: Use set instead of update in duty off --- bot/exts/moderation/duty.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index 265261be8..0b07510db 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ -97,7 +97,7 @@ class Duty(Cog): until_date = duration.replace(microsecond=0).isoformat() await mod.remove_roles(self.moderators_role, reason=f"Entered off-duty period until {until_date}.") - await self.off_duty_mods.update({mod.id: duration.isoformat()}) + await self.off_duty_mods.set(mod.id, duration.isoformat()) if mod.id in self._role_scheduler: self._role_scheduler.cancel(mod.id) -- cgit v1.2.3 From 15aa872d6ff7de253e3383380013aa7e52bab6c0 Mon Sep 17 00:00:00 2001 From: vcokltfre Date: Thu, 15 Apr 2021 06:40:57 +0100 Subject: chore: update wording as requested --- bot/resources/tags/customchecks.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/resources/tags/customchecks.md b/bot/resources/tags/customchecks.md index 96f833430..23ff7a66f 100644 --- a/bot/resources/tags/customchecks.md +++ b/bot/resources/tags/customchecks.md @@ -1,6 +1,6 @@ **Custom Command Checks in discord.py** -You may find yourself in need of a check decorator to do something that doesn't exist in discord.py by default, but fear not, you can make your own! Using discord.py you can use `discord.ext.commands.check` to create you own checks like this: +Often you may find the need to use checks that don't exist by default in discord.py. 
Fortunately, discord.py provides `discord.ext.commands.check` which allows you to create you own checks like this: ```py from discord.ext.commands import check, Context @@ -9,9 +9,9 @@ def in_any_channel(*channels): return ctx.channel.id in channels return check(predicate) ``` -There's a fair bit to break down here, so let's start with what we're trying to achieve with this check. As you can probably guess from the name it's locking a command to a **list of channels**. The inner function named `predicate` is used to perform the actual check on the command context. Here you can do anything that requires a `Context` object. This inner function should return `True` if the check is **successful** or `False` if the check **fails**. +This check is to check whether the invoked command is in a given set of channels. The inner function, named `predicate` here, is used to perform the actual check on the command, and check logic should go in this function. It must be an async function, and always provides a single `commands.Context` argument which you can use to create check logic. This check function should return a boolean value indicating whether the check passed (return `True`) or failed (return `False`). -Here's how we might use our new check: +The check can now be used like any other commands check as a decorator of a command, such as this: ```py @bot.command(name="ping") @in_any_channel(728343273562701984) -- cgit v1.2.3 From 6a875a0b0a6aca8dd33e711d00d5e9b92095918e Mon Sep 17 00:00:00 2001 From: mbaruh Date: Thu, 15 Apr 2021 20:33:35 +0300 Subject: Allow eval almost everywhere Adds a check to blacklist a command only in a specific context, with an option for a role override. The check is applied to the eval command to blacklist it only from python-general. 
--- bot/decorators.py | 41 ++++++++++++++++++++++++++++++++++++++++- bot/exts/utils/snekbox.py | 8 ++++---- 2 files changed, 44 insertions(+), 5 deletions(-) diff --git a/bot/decorators.py b/bot/decorators.py index 1d30317ef..5a49d64fc 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -11,7 +11,7 @@ from discord.ext.commands import Cog, Context from bot.constants import Channels, DEBUG_MODE, RedirectOutput from bot.utils import function -from bot.utils.checks import in_whitelist_check +from bot.utils.checks import InWhitelistCheckFailure, in_whitelist_check from bot.utils.function import command_wraps log = logging.getLogger(__name__) @@ -45,6 +45,45 @@ def in_whitelist( return commands.check(predicate) +def not_in_blacklist( + *, + channels: t.Container[int] = (), + categories: t.Container[int] = (), + roles: t.Container[int] = (), + override_roles: t.Container[int] = (), + redirect: t.Optional[int] = Channels.bot_commands, + fail_silently: bool = False, +) -> t.Callable: + """ + Check if a command was not issued in a blacklisted context. + + The blacklists that can be provided are: + + - `channels`: a container with channel ids for blacklisted channels + - `categories`: a container with category ids for blacklisted categories + - `roles`: a container with role ids for blacklisted roles + + If the command was invoked in a context that was blacklisted, the member is either + redirected to the `redirect` channel that was passed (default: #bot-commands) or simply + told that they're not allowed to use this particular command (if `None` was passed). + + The blacklist can be overridden through the roles specified in `override_roles`. 
+ """ + def predicate(ctx: Context) -> bool: + """Check if command was issued in a blacklisted context.""" + not_blacklisted = not in_whitelist_check(ctx, channels, categories, roles, fail_silently=True) + overridden = in_whitelist_check(ctx, roles=override_roles, fail_silently=True) + + success = not_blacklisted or overridden + + if not success and not fail_silently: + raise InWhitelistCheckFailure(redirect) + + return success + + return commands.check(predicate) + + def has_no_roles(*roles: t.Union[str, int]) -> t.Callable: """ Returns True if the user does not have any of the roles specified. diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py index 9f480c067..6ea588888 100644 --- a/bot/exts/utils/snekbox.py +++ b/bot/exts/utils/snekbox.py @@ -13,7 +13,7 @@ from discord.ext.commands import Cog, Context, command, guild_only from bot.bot import Bot from bot.constants import Categories, Channels, Roles, URLs -from bot.decorators import in_whitelist +from bot.decorators import not_in_blacklist from bot.utils import send_to_paste_service from bot.utils.messages import wait_for_deletion @@ -39,8 +39,8 @@ RAW_CODE_REGEX = re.compile( MAX_PASTE_LEN = 10000 # `!eval` command whitelists -EVAL_CHANNELS = (Channels.bot_commands, Channels.esoteric) -EVAL_CATEGORIES = (Categories.help_available, Categories.help_in_use, Categories.voice) +NO_EVAL_CHANNELS = (Channels.python_general,) +NO_EVAL_CATEGORIES = () EVAL_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners) SIGKILL = 9 @@ -280,7 +280,7 @@ class Snekbox(Cog): @command(name="eval", aliases=("e",)) @guild_only() - @in_whitelist(channels=EVAL_CHANNELS, categories=EVAL_CATEGORIES, roles=EVAL_ROLES) + @not_in_blacklist(channels=NO_EVAL_CHANNELS, categories=NO_EVAL_CATEGORIES, override_roles=EVAL_ROLES) async def eval_command(self, ctx: Context, *, code: str = None) -> None: """ Run Python code and get the results. 
-- cgit v1.2.3 From f80303718eed9bc676fe2e3e3fc06cffffbf1a92 Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Thu, 15 Apr 2021 22:10:26 +0200 Subject: Make trace logging optional and allow selective enabling Because coloredlogs' install changes the level of the root handler, the setLevel call had to be moved to after the install. --- bot/constants.py | 1 + bot/log.py | 20 ++++++++++++++------ config-default.yml | 7 ++++--- 3 files changed, 19 insertions(+), 9 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 6d14bbb3a..14400700f 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -199,6 +199,7 @@ class Bot(metaclass=YAMLGetter): prefix: str sentry_dsn: Optional[str] token: str + trace_loggers: Optional[str] class Redis(metaclass=YAMLGetter): diff --git a/bot/log.py b/bot/log.py index e92233a33..339ed63a7 100644 --- a/bot/log.py +++ b/bot/log.py @@ -20,7 +20,6 @@ def setup() -> None: logging.addLevelName(TRACE_LEVEL, "TRACE") Logger.trace = _monkeypatch_trace - log_level = TRACE_LEVEL if constants.DEBUG_MODE else logging.INFO format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s" log_format = logging.Formatter(format_string) @@ -30,7 +29,6 @@ def setup() -> None: file_handler.setFormatter(log_format) root_log = logging.getLogger() - root_log.setLevel(log_level) root_log.addHandler(file_handler) if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: @@ -44,11 +42,9 @@ def setup() -> None: if "COLOREDLOGS_LOG_FORMAT" not in os.environ: coloredlogs.DEFAULT_LOG_FORMAT = format_string - if "COLOREDLOGS_LOG_LEVEL" not in os.environ: - coloredlogs.DEFAULT_LOG_LEVEL = log_level - - coloredlogs.install(logger=root_log, stream=sys.stdout) + coloredlogs.install(level=logging.TRACE, logger=root_log, stream=sys.stdout) + root_log.setLevel(logging.DEBUG if constants.DEBUG_MODE else logging.INFO) logging.getLogger("discord").setLevel(logging.WARNING) logging.getLogger("websockets").setLevel(logging.WARNING) 
logging.getLogger("chardet").setLevel(logging.WARNING) @@ -57,6 +53,8 @@ def setup() -> None: # Set back to the default of INFO even if asyncio's debug mode is enabled. logging.getLogger("asyncio").setLevel(logging.INFO) + _set_trace_loggers() + def setup_sentry() -> None: """Set up the Sentry logging integrations.""" @@ -86,3 +84,13 @@ def _monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: """ if self.isEnabledFor(TRACE_LEVEL): self._log(TRACE_LEVEL, msg, args, **kwargs) + + +def _set_trace_loggers() -> None: + """Set loggers to the trace level according to the value from the BOT_TRACE_LOGGERS env var.""" + if constants.Bot.trace_loggers: + if constants.Bot.trace_loggers in {"*", "ROOT"}: + logging.getLogger().setLevel(logging.TRACE) + else: + for logger_name in constants.Bot.trace_loggers.split(","): + logging.getLogger(logger_name).setLevel(logging.TRACE) diff --git a/config-default.yml b/config-default.yml index 8c6e18470..b9786925d 100644 --- a/config-default.yml +++ b/config-default.yml @@ -1,7 +1,8 @@ bot: - prefix: "!" - sentry_dsn: !ENV "BOT_SENTRY_DSN" - token: !ENV "BOT_TOKEN" + prefix: "!" + sentry_dsn: !ENV "BOT_SENTRY_DSN" + token: !ENV "BOT_TOKEN" + trace_loggers: !ENV "BOT_TRACE_LOGGERS" clean: # Maximum number of messages to traverse for clean commands -- cgit v1.2.3 From 854b0f4944700cb7a5b6a032029e513cef390e7e Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 16 Apr 2021 00:06:31 +0300 Subject: Raise a new NotInBlacklistCheckFailure instead This creates a new baseclass called ContextCheckFailure, and the new error as well as InWhitelistCheckFailure now derive it. 
--- bot/decorators.py | 8 ++++++-- bot/exts/backend/error_handler.py | 4 ++-- bot/exts/utils/snekbox.py | 2 +- bot/utils/checks.py | 8 ++++++-- 4 files changed, 15 insertions(+), 7 deletions(-) diff --git a/bot/decorators.py b/bot/decorators.py index 5a49d64fc..e971a5bd3 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -11,7 +11,7 @@ from discord.ext.commands import Cog, Context from bot.constants import Channels, DEBUG_MODE, RedirectOutput from bot.utils import function -from bot.utils.checks import InWhitelistCheckFailure, in_whitelist_check +from bot.utils.checks import ContextCheckFailure, in_whitelist_check from bot.utils.function import command_wraps log = logging.getLogger(__name__) @@ -45,6 +45,10 @@ def in_whitelist( return commands.check(predicate) +class NotInBlacklistCheckFailure(ContextCheckFailure): + """Raised when the 'not_in_blacklist' check fails.""" + + def not_in_blacklist( *, channels: t.Container[int] = (), @@ -77,7 +81,7 @@ def not_in_blacklist( success = not_blacklisted or overridden if not success and not fail_silently: - raise InWhitelistCheckFailure(redirect) + raise NotInBlacklistCheckFailure(redirect) return success diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py index 76ab7dfc2..da0e94a7e 100644 --- a/bot/exts/backend/error_handler.py +++ b/bot/exts/backend/error_handler.py @@ -12,7 +12,7 @@ from bot.bot import Bot from bot.constants import Colours, Icons, MODERATION_ROLES from bot.converters import TagNameConverter from bot.errors import InvalidInfractedUser, LockedResourceError -from bot.utils.checks import InWhitelistCheckFailure +from bot.utils.checks import ContextCheckFailure log = logging.getLogger(__name__) @@ -274,7 +274,7 @@ class ErrorHandler(Cog): await ctx.send( "Sorry, it looks like I don't have the permissions or roles I need to do that." 
) - elif isinstance(e, (InWhitelistCheckFailure, errors.NoPrivateMessage)): + elif isinstance(e, (ContextCheckFailure, errors.NoPrivateMessage)): ctx.bot.stats.incr("errors.wrong_channel_or_dm_error") await ctx.send(e) diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py index 6ea588888..da95240bb 100644 --- a/bot/exts/utils/snekbox.py +++ b/bot/exts/utils/snekbox.py @@ -38,7 +38,7 @@ RAW_CODE_REGEX = re.compile( MAX_PASTE_LEN = 10000 -# `!eval` command whitelists +# `!eval` command whitelists and blacklists. NO_EVAL_CHANNELS = (Channels.python_general,) NO_EVAL_CATEGORIES = () EVAL_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners) diff --git a/bot/utils/checks.py b/bot/utils/checks.py index 460a937d8..3d0c8a50c 100644 --- a/bot/utils/checks.py +++ b/bot/utils/checks.py @@ -20,8 +20,8 @@ from bot import constants log = logging.getLogger(__name__) -class InWhitelistCheckFailure(CheckFailure): - """Raised when the `in_whitelist` check fails.""" +class ContextCheckFailure(CheckFailure): + """Raised when a context-specific check fails.""" def __init__(self, redirect_channel: Optional[int]) -> None: self.redirect_channel = redirect_channel @@ -36,6 +36,10 @@ class InWhitelistCheckFailure(CheckFailure): super().__init__(error_message) +class InWhitelistCheckFailure(ContextCheckFailure): + """Raised when the `in_whitelist` check fails.""" + + def in_whitelist_check( ctx: Context, channels: Container[int] = (), -- cgit v1.2.3 From f11ebfde17634eed7fa242f72b309c4a75c885cd Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 17 Apr 2021 01:15:38 +0300 Subject: Keep config succint A moderator is expected to have the mod-team role and therefore it's enough to specify the latter in the mod and staff roles. 
--- config-default.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/config-default.yml b/config-default.yml index 6eb954cd5..b19164d3f 100644 --- a/config-default.yml +++ b/config-default.yml @@ -274,14 +274,12 @@ guild: moderation_roles: - *ADMINS_ROLE - - *MODS_ROLE - *MOD_TEAM_ROLE - *OWNERS_ROLE staff_roles: - *ADMINS_ROLE - *HELPERS_ROLE - - *MODS_ROLE - *MOD_TEAM_ROLE - *OWNERS_ROLE -- cgit v1.2.3 From 2053b2e36ece02680ed85b970c4fbf687fe07e0f Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 17 Apr 2021 01:19:54 +0300 Subject: Assume a scheduled task exists for `duty on` The lack of such a task may be indicative of a bug. --- bot/exts/moderation/duty.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index 0b07510db..8d0c96363 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ -118,8 +118,7 @@ class Duty(Cog): await self.off_duty_mods.delete(mod.id) - if mod.id in self._role_scheduler: - self._role_scheduler.cancel(mod.id) + self._role_scheduler.cancel(mod.id) await ctx.send(f"{Emojis.check_mark} Moderators role has been re-applied.") -- cgit v1.2.3 From 5506fb74f90831e686f4636595f62e4bcc72a703 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 17 Apr 2021 01:24:17 +0300 Subject: Improve documentation --- bot/exts/moderation/duty.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index 8d0c96363..eab0fd99f 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ -94,11 +94,12 @@ class Duty(Cog): mod = ctx.author - until_date = duration.replace(microsecond=0).isoformat() + until_date = duration.replace(microsecond=0).isoformat() # Looks noisy with microseconds. 
await mod.remove_roles(self.moderators_role, reason=f"Entered off-duty period until {until_date}.") await self.off_duty_mods.set(mod.id, duration.isoformat()) + # Allow rescheduling the task without cancelling it separately via the `on` command. if mod.id in self._role_scheduler: self._role_scheduler.cancel(mod.id) self._role_scheduler.schedule_at(duration, mod.id, self.reapply_role(mod)) @@ -118,6 +119,7 @@ class Duty(Cog): await self.off_duty_mods.delete(mod.id) + # We assume the task exists. Lack of it may indicate a bug. self._role_scheduler.cancel(mod.id) await ctx.send(f"{Emojis.check_mark} Moderators role has been re-applied.") -- cgit v1.2.3 From 4a051cdb016748daca724e95957bd011cc3f6c3f Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 17 Apr 2021 01:43:17 +0300 Subject: Name the rescheduling task, and cancel it on cog unload --- bot/exts/moderation/duty.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index eab0fd99f..e05472448 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ -29,7 +29,7 @@ class Duty(Cog): self.guild = None self.moderators_role = None - self.bot.loop.create_task(self.reschedule_roles()) + self.reschedule_task = self.bot.loop.create_task(self.reschedule_roles(), name="duty-reschedule") async def reschedule_roles(self) -> None: """Reschedule moderators role re-apply times.""" @@ -127,6 +127,7 @@ class Duty(Cog): def cog_unload(self) -> None: """Cancel role tasks when the cog unloads.""" log.trace("Cog unload: canceling role tasks.") + self.reschedule_task.cancel() self._role_scheduler.cancel_all() -- cgit v1.2.3 From d2d939c96de22ae174072dd8cc2bad2fe4f2174a Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Sat, 17 Apr 2021 13:19:08 +0300 Subject: Remove here ping Kinda defeats the purpose of being off-duty. 
--- bot/exts/moderation/modlog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index f68a1880e..5e8ea595b 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -115,9 +115,9 @@ class ModLog(Cog, name="ModLog"): if ping_everyone: if content: - content = f"<@&{Roles.moderators}> @here\n{content}" + content = f"<@&{Roles.moderators}>\n{content}" else: - content = f"<@&{Roles.moderators}> @here" + content = f"<@&{Roles.moderators}>" # Truncate content to 2000 characters and append an ellipsis. if content and len(content) > 2000: -- cgit v1.2.3 From 0e4fd3d2d0ae4b0f403cc8f163c783284aefae56 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sat, 17 Apr 2021 18:37:44 +0200 Subject: Make YAMLGetter raise AttributeError instead of KeyError Utility functions such as hasattr or getattr except __getattribute__ to raise AttributeError not KeyError. This commit also lowers the logging level of the error message to info since it is up to the caller to decide if this is an expected failure or not. --- bot/constants.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index dc9cd4dfb..3254c2761 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -175,13 +175,14 @@ class YAMLGetter(type): if cls.subsection is not None: return _CONFIG_YAML[cls.section][cls.subsection][name] return _CONFIG_YAML[cls.section][name] - except KeyError: + except KeyError as e: dotted_path = '.'.join( (cls.section, cls.subsection, name) if cls.subsection is not None else (cls.section, name) ) - log.critical(f"Tried accessing configuration variable at `{dotted_path}`, but it could not be found.") - raise + # Only an INFO log since this can be caught through `hasattr` or `getattr`. 
+ log.info(f"Tried accessing configuration variable at `{dotted_path}`, but it could not be found.") + raise AttributeError(repr(name)) from e def __getitem__(cls, name): return cls.__getattr__(name) -- cgit v1.2.3 From c910427937760f50fe7df3851989170c3494cde2 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sat, 17 Apr 2021 18:38:09 +0200 Subject: Move the verified developer badge to the embed title --- bot/constants.py | 2 +- bot/exts/info/information.py | 4 +++- config-default.yml | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index 3254c2761..813f970cd 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -280,7 +280,7 @@ class Emojis(metaclass=YAMLGetter): badge_partner: str badge_staff: str badge_verified_bot_developer: str - badge_verified_bot: str + verified_bot: str bot: str defcon_shutdown: str # noqa: E704 diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 226e4992e..834fee1b4 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -230,7 +230,9 @@ class Information(Cog): if on_server and user.nick: name = f"{user.nick} ({name})" - if user.bot: + if user.public_flags.verified_bot: + name += f" {constants.Emojis.verified_bot}" + elif user.bot: name += f" {constants.Emojis.bot}" badges = [] diff --git a/config-default.yml b/config-default.yml index dba354117..b6955c63c 100644 --- a/config-default.yml +++ b/config-default.yml @@ -46,8 +46,8 @@ style: badge_partner: "<:partner:748666453242413136>" badge_staff: "<:discord_staff:743882896498098226>" badge_verified_bot_developer: "<:verified_bot_dev:743882897299210310>" - badge_verified_bot: "<:verified_bot:811645219220750347>" bot: "<:bot:812712599464443914>" + verified_bot: "<:verified_bot:811645219220750347>" defcon_shutdown: "<:defcondisabled:470326273952972810>" defcon_unshutdown: "<:defconenabled:470326274213150730>" -- cgit v1.2.3 From 93c9e536a3e771db2ac03054a5c2470883d59f1f Mon Sep 17 
00:00:00 2001 From: Matteo Bertucci Date: Sat, 17 Apr 2021 18:52:19 +0200 Subject: Tests: members shouldn't have any public flags --- tests/bot/exts/info/test_information.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index a996ce477..d2ecee033 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -281,9 +281,13 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """The embed should use the string representation of the user if they don't have a nick.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) user = helpers.MockMember() + public_flags = unittest.mock.MagicMock() + public_flags.__iter__.return_value = iter(()) + public_flags.verified_bot = False user.nick = None user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") user.colour = 0 + user.public_flags = public_flags embed = await self.cog.create_user_embed(ctx, user) @@ -297,9 +301,13 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """The embed should use the nick if it's available.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) user = helpers.MockMember() + public_flags = unittest.mock.MagicMock() + public_flags.__iter__.return_value = iter(()) + public_flags.verified_bot = False user.nick = "Cat lover" user.__str__ = unittest.mock.Mock(return_value="Mr. 
Hemlock") user.colour = 0 + user.public_flags = public_flags embed = await self.cog.create_user_embed(ctx, user) -- cgit v1.2.3 From 17770021be89e82c0e3edf1d01a6e10775fd871a Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Sat, 17 Apr 2021 19:02:20 +0200 Subject: Sort snippet matches by their start position --- bot/exts/info/code_snippets.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index f0cd54c0c..b9e7cc3d0 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -205,9 +205,9 @@ class CodeSnippets(Cog): ret = f'`{file_path}` lines {start_line} to {end_line}\n' if len(required) != 0: - return f'{ret}```{language}\n{required}```\n' + return f'{ret}```{language}\n{required}```' # Returns an empty codeblock if the snippet is empty - return f'{ret}``` ```\n' + return f'{ret}``` ```' def __init__(self, bot: Bot): """Initializes the cog's bot.""" @@ -224,13 +224,18 @@ class CodeSnippets(Cog): async def on_message(self, message: Message) -> None: """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" if not message.author.bot: - message_to_send = '' + all_snippets = [] for pattern, handler in self.pattern_handlers: for match in pattern.finditer(message.content): - message_to_send += await handler(**match.groupdict()) + snippet = await handler(**match.groupdict()) + all_snippets.append((match.start(), snippet)) - if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15: + # Sorts the list of snippets by their match index and joins them into + # a single message + message_to_send = '\n'.join(map(lambda x: x[1], sorted(all_snippets))) + + if 0 < len(message_to_send) <= 2000 and len(all_snippets) <= 15: await message.edit(suppress=True) await wait_for_deletion( await message.channel.send(message_to_send), -- cgit v1.2.3 From 
94af3c07678f1f2dee722f4780a816426efd0851 Mon Sep 17 00:00:00 2001 From: Vivaan Verma Date: Sun, 18 Apr 2021 21:12:08 +0100 Subject: Added default duration of 1h to superstarify --- bot/exts/moderation/infraction/superstarify.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 704dddf9c..245f14905 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -109,7 +109,7 @@ class Superstarify(InfractionScheduler, Cog): self, ctx: Context, member: Member, - duration: Expiry, + duration: Expiry = "1h", *, reason: str = '', ) -> None: -- cgit v1.2.3 From 3126e00a28e498afc8ecef1ed87b356f0e4a38c4 Mon Sep 17 00:00:00 2001 From: Vivaan Verma Date: Sun, 18 Apr 2021 22:11:46 +0100 Subject: Make duration an optional arg and default it to 1 hour --- bot/exts/moderation/infraction/superstarify.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 245f14905..8a6d14d41 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -1,3 +1,4 @@ +import datetime import json import logging import random @@ -109,7 +110,7 @@ class Superstarify(InfractionScheduler, Cog): self, ctx: Context, member: Member, - duration: Expiry = "1h", + duration: t.Optional[Expiry], *, reason: str = '', ) -> None: @@ -134,6 +135,9 @@ class Superstarify(InfractionScheduler, Cog): if await _utils.get_active_infraction(ctx, member, "superstar"): return + # Set the duration to 1 hour if none was provided + duration = datetime.datetime.now() + datetime.timedelta(hours=1) + # Post the infraction to the API old_nick = member.display_name infraction_reason = f'Old nickname: {old_nick}. 
{reason}' -- cgit v1.2.3 From 7fc5e37ecd2e1589b77b7fa16af26ee42e72dcdc Mon Sep 17 00:00:00 2001 From: Vivaan Verma Date: Sun, 18 Apr 2021 22:17:27 +0100 Subject: Check if a duration was provided --- bot/exts/moderation/infraction/superstarify.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 8a6d14d41..f5d6259cd 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -136,8 +136,9 @@ class Superstarify(InfractionScheduler, Cog): return # Set the duration to 1 hour if none was provided - duration = datetime.datetime.now() + datetime.timedelta(hours=1) - + if not duration: + duration = datetime.datetime.now() + datetime.timedelta(hours=1) + # Post the infraction to the API old_nick = member.display_name infraction_reason = f'Old nickname: {old_nick}. {reason}' -- cgit v1.2.3 From 6169ed2b73a5f2d763a2758e69ba4983127a1373 Mon Sep 17 00:00:00 2001 From: Vivaan Verma <54081925+doublevcodes@users.noreply.github.com> Date: Sun, 18 Apr 2021 22:31:40 +0100 Subject: Fix linting errors --- bot/exts/moderation/infraction/superstarify.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index f5d6259cd..6fa0d550f 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -138,7 +138,7 @@ class Superstarify(InfractionScheduler, Cog): # Set the duration to 1 hour if none was provided if not duration: duration = datetime.datetime.now() + datetime.timedelta(hours=1) - + # Post the infraction to the API old_nick = member.display_name infraction_reason = f'Old nickname: {old_nick}. 
{reason}' -- cgit v1.2.3 From bd54449e8994c38b2fd073056f82e6c52785d4c6 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Mon, 19 Apr 2021 15:43:33 +0300 Subject: Renamed Duty cog to Modpings The renaming includes the commands inside it. --- bot/exts/moderation/duty.py | 46 ++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py index e05472448..c351db615 100644 --- a/bot/exts/moderation/duty.py +++ b/bot/exts/moderation/duty.py @@ -14,13 +14,13 @@ from bot.utils.scheduling import Scheduler log = logging.getLogger(__name__) -class Duty(Cog): - """Commands for a moderator to go on and off duty.""" +class Modpings(Cog): + """Commands for a moderator to turn moderator pings on and off.""" # RedisCache[str, str] - # The cache's keys are mods who are off-duty. + # The cache's keys are mods who have pings off. # The cache's values are the times when the role should be re-applied to them, stored in ISO format. 
- off_duty_mods = RedisCache() + pings_off_mods = RedisCache() def __init__(self, bot: Bot): self.bot = bot @@ -29,7 +29,7 @@ class Duty(Cog): self.guild = None self.moderators_role = None - self.reschedule_task = self.bot.loop.create_task(self.reschedule_roles(), name="duty-reschedule") + self.reschedule_task = self.bot.loop.create_task(self.reschedule_roles(), name="mod-pings-reschedule") async def reschedule_roles(self) -> None: """Reschedule moderators role re-apply times.""" @@ -38,35 +38,35 @@ class Duty(Cog): self.moderators_role = self.guild.get_role(Roles.moderators) mod_team = self.guild.get_role(Roles.mod_team) - on_duty = self.moderators_role.members - off_duty = await self.off_duty_mods.to_dict() + pings_on = self.moderators_role.members + pings_off = await self.pings_off_mods.to_dict() log.trace("Applying the moderators role to the mod team where necessary.") for mod in mod_team.members: - if mod in on_duty: # Make sure that on-duty mods aren't in the cache. - if mod in off_duty: - await self.off_duty_mods.delete(mod.id) + if mod in pings_on: # Make sure that on-duty mods aren't in the cache. + if mod in pings_off: + await self.pings_off_mods.delete(mod.id) continue # Keep the role off only for those in the cache. 
- if mod.id not in off_duty: + if mod.id not in pings_off: await self.reapply_role(mod) else: - expiry = isoparse(off_duty[mod.id]).replace(tzinfo=None) + expiry = isoparse(pings_off[mod.id]).replace(tzinfo=None) self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod)) async def reapply_role(self, mod: Member) -> None: """Reapply the moderator's role to the given moderator.""" log.trace(f"Re-applying role to mod with ID {mod.id}.") - await mod.add_roles(self.moderators_role, reason="Off-duty period expired.") + await mod.add_roles(self.moderators_role, reason="Pings off period expired.") - @group(name='duty', invoke_without_command=True) + @group(name='modpings', aliases=('modping',), invoke_without_command=True) @has_any_role(*MODERATION_ROLES) - async def duty_group(self, ctx: Context) -> None: + async def modpings_group(self, ctx: Context) -> None: """Allow the removal and re-addition of the pingable moderators role.""" await ctx.send_help(ctx.command) - @duty_group.command(name='off') + @modpings_group.command(name='off') @has_any_role(*MODERATION_ROLES) async def off_command(self, ctx: Context, duration: Expiry) -> None: """ @@ -95,9 +95,9 @@ class Duty(Cog): mod = ctx.author until_date = duration.replace(microsecond=0).isoformat() # Looks noisy with microseconds. - await mod.remove_roles(self.moderators_role, reason=f"Entered off-duty period until {until_date}.") + await mod.remove_roles(self.moderators_role, reason=f"Turned pings off until {until_date}.") - await self.off_duty_mods.set(mod.id, duration.isoformat()) + await self.pings_off_mods.set(mod.id, duration.isoformat()) # Allow rescheduling the task without cancelling it separately via the `on` command. 
if mod.id in self._role_scheduler: @@ -106,7 +106,7 @@ class Duty(Cog): await ctx.send(f"{Emojis.check_mark} Moderators role has been removed until {until_date}.") - @duty_group.command(name='on') + @modpings_group.command(name='on') @has_any_role(*MODERATION_ROLES) async def on_command(self, ctx: Context) -> None: """Re-apply the pingable moderators role.""" @@ -115,9 +115,9 @@ class Duty(Cog): await ctx.send(":question: You already have the role.") return - await mod.add_roles(self.moderators_role, reason="Off-duty period canceled.") + await mod.add_roles(self.moderators_role, reason="Pings off period canceled.") - await self.off_duty_mods.delete(mod.id) + await self.pings_off_mods.delete(mod.id) # We assume the task exists. Lack of it may indicate a bug. self._role_scheduler.cancel(mod.id) @@ -132,5 +132,5 @@ class Duty(Cog): def setup(bot: Bot) -> None: - """Load the Duty cog.""" - bot.add_cog(Duty(bot)) + """Load the Modpings cog.""" + bot.add_cog(Modpings(bot)) -- cgit v1.2.3 From e30667fb4e23648c3f308bfc06cf643852d0c29c Mon Sep 17 00:00:00 2001 From: mbaruh Date: Mon, 19 Apr 2021 15:44:58 +0300 Subject: Renamed duty.py to modpings.py --- bot/exts/moderation/duty.py | 136 ---------------------------------------- bot/exts/moderation/modpings.py | 136 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 136 insertions(+), 136 deletions(-) delete mode 100644 bot/exts/moderation/duty.py create mode 100644 bot/exts/moderation/modpings.py diff --git a/bot/exts/moderation/duty.py b/bot/exts/moderation/duty.py deleted file mode 100644 index c351db615..000000000 --- a/bot/exts/moderation/duty.py +++ /dev/null @@ -1,136 +0,0 @@ -import datetime -import logging - -from async_rediscache import RedisCache -from dateutil.parser import isoparse -from discord import Member -from discord.ext.commands import Cog, Context, group, has_any_role - -from bot.bot import Bot -from bot.constants import Emojis, Guild, MODERATION_ROLES, Roles -from bot.converters import Expiry 
-from bot.utils.scheduling import Scheduler - -log = logging.getLogger(__name__) - - -class Modpings(Cog): - """Commands for a moderator to turn moderator pings on and off.""" - - # RedisCache[str, str] - # The cache's keys are mods who have pings off. - # The cache's values are the times when the role should be re-applied to them, stored in ISO format. - pings_off_mods = RedisCache() - - def __init__(self, bot: Bot): - self.bot = bot - self._role_scheduler = Scheduler(self.__class__.__name__) - - self.guild = None - self.moderators_role = None - - self.reschedule_task = self.bot.loop.create_task(self.reschedule_roles(), name="mod-pings-reschedule") - - async def reschedule_roles(self) -> None: - """Reschedule moderators role re-apply times.""" - await self.bot.wait_until_guild_available() - self.guild = self.bot.get_guild(Guild.id) - self.moderators_role = self.guild.get_role(Roles.moderators) - - mod_team = self.guild.get_role(Roles.mod_team) - pings_on = self.moderators_role.members - pings_off = await self.pings_off_mods.to_dict() - - log.trace("Applying the moderators role to the mod team where necessary.") - for mod in mod_team.members: - if mod in pings_on: # Make sure that on-duty mods aren't in the cache. - if mod in pings_off: - await self.pings_off_mods.delete(mod.id) - continue - - # Keep the role off only for those in the cache. 
- if mod.id not in pings_off: - await self.reapply_role(mod) - else: - expiry = isoparse(pings_off[mod.id]).replace(tzinfo=None) - self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod)) - - async def reapply_role(self, mod: Member) -> None: - """Reapply the moderator's role to the given moderator.""" - log.trace(f"Re-applying role to mod with ID {mod.id}.") - await mod.add_roles(self.moderators_role, reason="Pings off period expired.") - - @group(name='modpings', aliases=('modping',), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) - async def modpings_group(self, ctx: Context) -> None: - """Allow the removal and re-addition of the pingable moderators role.""" - await ctx.send_help(ctx.command) - - @modpings_group.command(name='off') - @has_any_role(*MODERATION_ROLES) - async def off_command(self, ctx: Context, duration: Expiry) -> None: - """ - Temporarily removes the pingable moderators role for a set amount of time. - - A unit of time should be appended to the duration. - Units (∗case-sensitive): - \u2003`y` - years - \u2003`m` - months∗ - \u2003`w` - weeks - \u2003`d` - days - \u2003`h` - hours - \u2003`M` - minutes∗ - \u2003`s` - seconds - - Alternatively, an ISO 8601 timestamp can be provided for the duration. - - The duration cannot be longer than 30 days. - """ - duration: datetime.datetime - delta = duration - datetime.datetime.utcnow() - if delta > datetime.timedelta(days=30): - await ctx.send(":x: Cannot remove the role for longer than 30 days.") - return - - mod = ctx.author - - until_date = duration.replace(microsecond=0).isoformat() # Looks noisy with microseconds. - await mod.remove_roles(self.moderators_role, reason=f"Turned pings off until {until_date}.") - - await self.pings_off_mods.set(mod.id, duration.isoformat()) - - # Allow rescheduling the task without cancelling it separately via the `on` command. 
- if mod.id in self._role_scheduler: - self._role_scheduler.cancel(mod.id) - self._role_scheduler.schedule_at(duration, mod.id, self.reapply_role(mod)) - - await ctx.send(f"{Emojis.check_mark} Moderators role has been removed until {until_date}.") - - @modpings_group.command(name='on') - @has_any_role(*MODERATION_ROLES) - async def on_command(self, ctx: Context) -> None: - """Re-apply the pingable moderators role.""" - mod = ctx.author - if mod in self.moderators_role.members: - await ctx.send(":question: You already have the role.") - return - - await mod.add_roles(self.moderators_role, reason="Pings off period canceled.") - - await self.pings_off_mods.delete(mod.id) - - # We assume the task exists. Lack of it may indicate a bug. - self._role_scheduler.cancel(mod.id) - - await ctx.send(f"{Emojis.check_mark} Moderators role has been re-applied.") - - def cog_unload(self) -> None: - """Cancel role tasks when the cog unloads.""" - log.trace("Cog unload: canceling role tasks.") - self.reschedule_task.cancel() - self._role_scheduler.cancel_all() - - -def setup(bot: Bot) -> None: - """Load the Modpings cog.""" - bot.add_cog(Modpings(bot)) diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py new file mode 100644 index 000000000..c351db615 --- /dev/null +++ b/bot/exts/moderation/modpings.py @@ -0,0 +1,136 @@ +import datetime +import logging + +from async_rediscache import RedisCache +from dateutil.parser import isoparse +from discord import Member +from discord.ext.commands import Cog, Context, group, has_any_role + +from bot.bot import Bot +from bot.constants import Emojis, Guild, MODERATION_ROLES, Roles +from bot.converters import Expiry +from bot.utils.scheduling import Scheduler + +log = logging.getLogger(__name__) + + +class Modpings(Cog): + """Commands for a moderator to turn moderator pings on and off.""" + + # RedisCache[str, str] + # The cache's keys are mods who have pings off. 
+ # The cache's values are the times when the role should be re-applied to them, stored in ISO format. + pings_off_mods = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self._role_scheduler = Scheduler(self.__class__.__name__) + + self.guild = None + self.moderators_role = None + + self.reschedule_task = self.bot.loop.create_task(self.reschedule_roles(), name="mod-pings-reschedule") + + async def reschedule_roles(self) -> None: + """Reschedule moderators role re-apply times.""" + await self.bot.wait_until_guild_available() + self.guild = self.bot.get_guild(Guild.id) + self.moderators_role = self.guild.get_role(Roles.moderators) + + mod_team = self.guild.get_role(Roles.mod_team) + pings_on = self.moderators_role.members + pings_off = await self.pings_off_mods.to_dict() + + log.trace("Applying the moderators role to the mod team where necessary.") + for mod in mod_team.members: + if mod in pings_on: # Make sure that on-duty mods aren't in the cache. + if mod in pings_off: + await self.pings_off_mods.delete(mod.id) + continue + + # Keep the role off only for those in the cache. 
+ if mod.id not in pings_off: + await self.reapply_role(mod) + else: + expiry = isoparse(pings_off[mod.id]).replace(tzinfo=None) + self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod)) + + async def reapply_role(self, mod: Member) -> None: + """Reapply the moderator's role to the given moderator.""" + log.trace(f"Re-applying role to mod with ID {mod.id}.") + await mod.add_roles(self.moderators_role, reason="Pings off period expired.") + + @group(name='modpings', aliases=('modping',), invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) + async def modpings_group(self, ctx: Context) -> None: + """Allow the removal and re-addition of the pingable moderators role.""" + await ctx.send_help(ctx.command) + + @modpings_group.command(name='off') + @has_any_role(*MODERATION_ROLES) + async def off_command(self, ctx: Context, duration: Expiry) -> None: + """ + Temporarily removes the pingable moderators role for a set amount of time. + + A unit of time should be appended to the duration. + Units (∗case-sensitive): + \u2003`y` - years + \u2003`m` - months∗ + \u2003`w` - weeks + \u2003`d` - days + \u2003`h` - hours + \u2003`M` - minutes∗ + \u2003`s` - seconds + + Alternatively, an ISO 8601 timestamp can be provided for the duration. + + The duration cannot be longer than 30 days. + """ + duration: datetime.datetime + delta = duration - datetime.datetime.utcnow() + if delta > datetime.timedelta(days=30): + await ctx.send(":x: Cannot remove the role for longer than 30 days.") + return + + mod = ctx.author + + until_date = duration.replace(microsecond=0).isoformat() # Looks noisy with microseconds. + await mod.remove_roles(self.moderators_role, reason=f"Turned pings off until {until_date}.") + + await self.pings_off_mods.set(mod.id, duration.isoformat()) + + # Allow rescheduling the task without cancelling it separately via the `on` command. 
+ if mod.id in self._role_scheduler: + self._role_scheduler.cancel(mod.id) + self._role_scheduler.schedule_at(duration, mod.id, self.reapply_role(mod)) + + await ctx.send(f"{Emojis.check_mark} Moderators role has been removed until {until_date}.") + + @modpings_group.command(name='on') + @has_any_role(*MODERATION_ROLES) + async def on_command(self, ctx: Context) -> None: + """Re-apply the pingable moderators role.""" + mod = ctx.author + if mod in self.moderators_role.members: + await ctx.send(":question: You already have the role.") + return + + await mod.add_roles(self.moderators_role, reason="Pings off period canceled.") + + await self.pings_off_mods.delete(mod.id) + + # We assume the task exists. Lack of it may indicate a bug. + self._role_scheduler.cancel(mod.id) + + await ctx.send(f"{Emojis.check_mark} Moderators role has been re-applied.") + + def cog_unload(self) -> None: + """Cancel role tasks when the cog unloads.""" + log.trace("Cog unload: canceling role tasks.") + self.reschedule_task.cancel() + self._role_scheduler.cancel_all() + + +def setup(bot: Bot) -> None: + """Load the Modpings cog.""" + bot.add_cog(Modpings(bot)) -- cgit v1.2.3 From 0204f7cc73bcf803fe86ca45cbdca19432b83cb6 Mon Sep 17 00:00:00 2001 From: francisdbillones <57383750+francisdbillones@users.noreply.github.com> Date: Mon, 19 Apr 2021 21:42:40 +0800 Subject: Fix zen's negative indexing Negative indexing starts at -1, not 0, meaning lower bound should be -1 * len(zen_lines), not -1 * upper_bound. 
--- bot/exts/utils/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index 8d9d27c64..4c39a7c2a 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -109,7 +109,7 @@ class Utils(Cog): # handle if it's an index int if isinstance(search_value, int): upper_bound = len(zen_lines) - 1 - lower_bound = -1 * upper_bound + lower_bound = -1 * len(zen_lines) if not (lower_bound <= search_value <= upper_bound): raise BadArgument(f"Please provide an index between {lower_bound} and {upper_bound}.") -- cgit v1.2.3 From 2ede01f32a49c3c1d4376b542789e770106711bc Mon Sep 17 00:00:00 2001 From: Numerlor <25886452+Numerlor@users.noreply.github.com> Date: Mon, 19 Apr 2021 15:46:14 +0200 Subject: Add blacklist format to the BOT_TRACE_LOGGERS env var To mimic the same behaviour, setting all of the loggers to the trace level was changed to a "*" prefix without looking at other contents instead of setting it exactly to "ROOT" or "*" --- bot/log.py | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/bot/log.py b/bot/log.py index 339ed63a7..4e20c005e 100644 --- a/bot/log.py +++ b/bot/log.py @@ -87,10 +87,27 @@ def _monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: def _set_trace_loggers() -> None: - """Set loggers to the trace level according to the value from the BOT_TRACE_LOGGERS env var.""" - if constants.Bot.trace_loggers: - if constants.Bot.trace_loggers in {"*", "ROOT"}: + """ + Set loggers to the trace level according to the value from the BOT_TRACE_LOGGERS env var. + + When the env var is a list of logger names delimited by a comma, + each of the listed loggers will be set to the trace level. + + If this list is prefixed with a "!", all of the loggers except the listed ones will be set to the trace level. + + Otherwise if the env var begins with a "*", + the root logger is set to the trace level and other contents are ignored. 
+ """ + level_filter = constants.Bot.trace_loggers + if level_filter: + if level_filter.startswith("*"): + logging.getLogger().setLevel(logging.TRACE) + + elif level_filter.startswith("!"): logging.getLogger().setLevel(logging.TRACE) + for logger_name in level_filter.strip("!,").split(","): + logging.getLogger(logger_name).setLevel(logging.DEBUG) + else: - for logger_name in constants.Bot.trace_loggers.split(","): + for logger_name in level_filter.strip(",").split(","): logging.getLogger(logger_name).setLevel(logging.TRACE) -- cgit v1.2.3 From cb253750a5597d8ca63e8742307bafc096c7e189 Mon Sep 17 00:00:00 2001 From: Chris Date: Mon, 19 Apr 2021 18:05:11 +0100 Subject: Require a mod role for stream commands Previously any staff member (including helpers) could use the stream commands. --- bot/exts/moderation/stream.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 12e195172..7ea7f635b 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -8,7 +8,7 @@ from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Colours, Emojis, Guild, Roles, STAFF_ROLES, VideoPermission +from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, VideoPermission from bot.converters import Expiry from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration @@ -69,7 +69,7 @@ class Stream(commands.Cog): ) @commands.command(aliases=("streaming",)) - @commands.has_any_role(*STAFF_ROLES) + @commands.has_any_role(*MODERATION_ROLES) async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None: """ Temporarily grant streaming permissions to a member for a given duration. 
@@ -126,7 +126,7 @@ class Stream(commands.Cog): log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.") @commands.command(aliases=("pstream",)) - @commands.has_any_role(*STAFF_ROLES) + @commands.has_any_role(*MODERATION_ROLES) async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None: """Permanently grants the given member the permission to stream.""" log.trace(f"Attempting to give permanent streaming permission to {member} ({member.id}).") @@ -153,7 +153,7 @@ class Stream(commands.Cog): log.debug(f"Successfully gave {member} ({member.id}) permanent streaming permission.") @commands.command(aliases=("unstream", "rstream")) - @commands.has_any_role(*STAFF_ROLES) + @commands.has_any_role(*MODERATION_ROLES) async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None: """Revoke the permission to stream from the given member.""" log.trace(f"Attempting to remove streaming permission from {member} ({member.id}).") -- cgit v1.2.3 From 90ed28f4cb31b5b41f7a395abfe61f4f9e49e091 Mon Sep 17 00:00:00 2001 From: Chris Date: Mon, 19 Apr 2021 18:08:39 +0100 Subject: Add command to list users with streaming perms This is useful to audit users who still have the permission to stream. I have chosen to also sort and paginate the embed to make it easier to read. The sorting is based on how long until the user's streaming permissions are revoked, with permanent streamers at the end. 
--- bot/exts/moderation/stream.py | 44 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 7ea7f635b..5f3820748 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,5 +1,6 @@ import logging from datetime import timedelta, timezone +from operator import itemgetter import arrow import discord @@ -8,8 +9,9 @@ from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, VideoPermission +from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission from bot.converters import Expiry +from bot.pagination import LinePaginator from bot.utils.scheduling import Scheduler from bot.utils.time import format_infraction_with_duration @@ -173,6 +175,46 @@ class Stream(commands.Cog): await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!") log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!") + @commands.command(aliases=('lstream',)) + @commands.has_any_role(*MODERATION_ROLES) + async def liststream(self, ctx: commands.Context) -> None: + """Lists all non-staff users who have permission to stream.""" + non_staff_members_with_stream = [ + _member + for _member in ctx.guild.get_role(Roles.video).members + if not any(role.id in STAFF_ROLES for role in _member.roles) + ] + + # List of tuples (UtcPosixTimestamp, str) + # This is so that we can sort before outputting to the paginator + streamer_info = [] + for member in non_staff_members_with_stream: + if revoke_time := await self.task_cache.get(member.id): + # Member only has temporary streaming perms + revoke_delta = Arrow.utcfromtimestamp(revoke_time).humanize() + message = f"{member.mention} will have stream permissions revoked {revoke_delta}." 
+ else: + message = f"{member.mention} has permanent streaming permissions." + + # If revoke_time is None use max timestamp to force sort to put them at the end + streamer_info.append( + (revoke_time or Arrow.max.timestamp(), message) + ) + + if streamer_info: + # Sort based on duration left of streaming perms + streamer_info.sort(key=itemgetter(0)) + + # Only output the message in the pagination + lines = [line[1] for line in streamer_info] + embed = discord.Embed( + title=f"Members who can stream (`{len(lines)}` total)", + colour=Colours.soft_green + ) + await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False) + else: + await ctx.send("No members with stream permissions found.") + def setup(bot: Bot) -> None: """Loads the Stream cog.""" -- cgit v1.2.3 From a6b76092e6e6005fc98c9863db051804d7bb963a Mon Sep 17 00:00:00 2001 From: Chris Date: Mon, 19 Apr 2021 18:16:42 +0100 Subject: Update wording of comment to be clearer. --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 5f3820748..d9837b5ed 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -186,7 +186,7 @@ class Stream(commands.Cog): ] # List of tuples (UtcPosixTimestamp, str) - # This is so that we can sort before outputting to the paginator + # This is so that output can be sorted on [0] before passed it's to the paginator streamer_info = [] for member in non_staff_members_with_stream: if revoke_time := await self.task_cache.get(member.id): -- cgit v1.2.3 From 94db90b038574077beb2fafb4f17741061ee8152 Mon Sep 17 00:00:00 2001 From: Chris Date: Mon, 19 Apr 2021 18:23:34 +0100 Subject: Remove unnecessary _ in variable name --- bot/exts/moderation/stream.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index d9837b5ed..e541baeb2 100644 --- 
a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -180,9 +180,9 @@ class Stream(commands.Cog): async def liststream(self, ctx: commands.Context) -> None: """Lists all non-staff users who have permission to stream.""" non_staff_members_with_stream = [ - _member - for _member in ctx.guild.get_role(Roles.video).members - if not any(role.id in STAFF_ROLES for role in _member.roles) + member + for member in ctx.guild.get_role(Roles.video).members + if not any(role.id in STAFF_ROLES for role in member.roles) ] # List of tuples (UtcPosixTimestamp, str) -- cgit v1.2.3 From 131dab3754da9fc1c3cf770d76bb9deea46f2f8d Mon Sep 17 00:00:00 2001 From: ChrisJL Date: Mon, 19 Apr 2021 18:40:23 +0100 Subject: Improve the wording of the list streamers embed Co-authored-by: Matteo Bertucci --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index e541baeb2..bd93ea492 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -208,7 +208,7 @@ class Stream(commands.Cog): # Only output the message in the pagination lines = [line[1] for line in streamer_info] embed = discord.Embed( - title=f"Members who can stream (`{len(lines)}` total)", + title=f"Members with streaming permission (`{len(lines)}` total)", colour=Colours.soft_green ) await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False) -- cgit v1.2.3 From a7581a4f9f2724672eebfdf541a922973c018c23 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Mon, 19 Apr 2021 20:48:26 +0300 Subject: CamelCase the cog name --- bot/exts/moderation/modpings.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py index c351db615..690aa7c68 100644 --- a/bot/exts/moderation/modpings.py +++ b/bot/exts/moderation/modpings.py @@ -14,7 +14,7 @@ from bot.utils.scheduling import Scheduler 
log = logging.getLogger(__name__) -class Modpings(Cog): +class ModPings(Cog): """Commands for a moderator to turn moderator pings on and off.""" # RedisCache[str, str] @@ -132,5 +132,5 @@ class Modpings(Cog): def setup(bot: Bot) -> None: - """Load the Modpings cog.""" - bot.add_cog(Modpings(bot)) + """Load the ModPings cog.""" + bot.add_cog(ModPings(bot)) -- cgit v1.2.3 From c001456cf29f944deb632b28130fb16a170092e9 Mon Sep 17 00:00:00 2001 From: Chris Date: Mon, 19 Apr 2021 18:49:09 +0100 Subject: Update comment in list stream for readibility --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index bd93ea492..1dbb2a46b 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -186,7 +186,7 @@ class Stream(commands.Cog): ] # List of tuples (UtcPosixTimestamp, str) - # This is so that output can be sorted on [0] before passed it's to the paginator + # So that the list can be sorted on the UtcPosixTimestamp before the message is passed to the paginator. streamer_info = [] for member in non_staff_members_with_stream: if revoke_time := await self.task_cache.get(member.id): -- cgit v1.2.3 From 40d21cf112b28858aad2508bf147b019314dd4ee Mon Sep 17 00:00:00 2001 From: Rohan Date: Mon, 19 Apr 2021 23:55:55 +0530 Subject: Add afk voice channel to constants. 
--- bot/constants.py | 1 + config-default.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/bot/constants.py b/bot/constants.py index 6d14bbb3a..b9444c989 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -444,6 +444,7 @@ class Channels(metaclass=YAMLGetter): mod_announcements: int staff_announcements: int + afk_voice: int admins_voice: int code_help_voice_1: int code_help_voice_2: int diff --git a/config-default.yml b/config-default.yml index 8c6e18470..204397f7f 100644 --- a/config-default.yml +++ b/config-default.yml @@ -206,6 +206,7 @@ guild: staff_announcements: &STAFF_ANNOUNCEMENTS 464033278631084042 # Voice Channels + afk_voice: 756327105389920306 admins_voice: &ADMINS_VOICE 500734494840717332 code_help_voice_1: 751592231726481530 code_help_voice_2: 764232549840846858 -- cgit v1.2.3 From fbbe1a861aa725d1f327716177b383ea38f20f0c Mon Sep 17 00:00:00 2001 From: Rohan Date: Tue, 20 Apr 2021 00:02:22 +0530 Subject: Add method for suspending member's stream when revoking stream perms. 
--- bot/exts/moderation/stream.py | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 1dbb2a46b..a2ebb6205 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -9,7 +9,7 @@ from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission +from bot.constants import Channels, Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission from bot.converters import Expiry from bot.pagination import LinePaginator from bot.utils.scheduling import Scheduler @@ -70,6 +70,27 @@ class Stream(commands.Cog): self._revoke_streaming_permission(member) ) + async def _suspend_stream(self, ctx: commands.Context, member: discord.Member) -> None: + """Suspend a member's stream.""" + voice_state = member.voice + + if not voice_state: + return + + # If the user is streaming. + if voice_state.self_stream: + # End user's stream by moving them to AFK voice channel and back. + original_vc = voice_state.channel + await member.move_to(self.bot.get_channel(Channels.afk_voice)) + await member.move_to(original_vc) + + # Notify. 
+ await ctx.send(f"{member.mention}'s stream has been suspended!") + log.debug(f"Successfully suspended stream from {member} ({member.id}).") + return + + log.debug(f"No stream found to suspend from {member} ({member.id}).") + @commands.command(aliases=("streaming",)) @commands.has_any_role(*MODERATION_ROLES) async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None: @@ -170,10 +191,12 @@ class Stream(commands.Cog): await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.") log.debug(f"Successfully revoked streaming permission from {member} ({member.id}).") - return - await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!") - log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!") + else: + await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!") + log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!") + + await self._suspend_stream(ctx, member) @commands.command(aliases=('lstream',)) @commands.has_any_role(*MODERATION_ROLES) -- cgit v1.2.3 From b8b920bfa5c4d918d41bfe06d85b1e85f4bec0da Mon Sep 17 00:00:00 2001 From: Vivaan Verma <54081925+doublevcodes@users.noreply.github.com> Date: Mon, 19 Apr 2021 20:01:41 +0100 Subject: Inline duration assignment Co-authored-by: Rohan Reddy Alleti --- bot/exts/moderation/infraction/superstarify.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 6fa0d550f..3d880dec3 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -136,8 +136,7 @@ class Superstarify(InfractionScheduler, Cog): return # Set the duration to 1 hour if none was provided - if not duration: - duration = datetime.datetime.now() + datetime.timedelta(hours=1) + duration = 
duration or datetime.datetime.utcnow() + datetime.timedelta(hours=1) # Post the infraction to the API old_nick = member.display_name -- cgit v1.2.3 From ae5d1cb65ddec0e70df00a4051a5bf813d4e6e20 Mon Sep 17 00:00:00 2001 From: Vivaan Verma Date: Mon, 19 Apr 2021 21:06:15 +0100 Subject: Add default duration as constant and use Duration converter --- bot/exts/moderation/infraction/superstarify.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 3d880dec3..0bc2198c3 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -1,4 +1,3 @@ -import datetime import json import logging import random @@ -12,7 +11,7 @@ from discord.utils import escape_markdown from bot import constants from bot.bot import Bot -from bot.converters import Expiry +from bot.converters import Duration from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler from bot.utils.messages import format_user @@ -20,6 +19,7 @@ from bot.utils.time import format_infraction log = logging.getLogger(__name__) NICKNAME_POLICY_URL = "https://pythondiscord.com/pages/rules/#nickname-policy" +SUPERSTARIFY_DEFAULT_DURATION = "1h" with Path("bot/resources/stars.json").open(encoding="utf-8") as stars_file: STAR_NAMES = json.load(stars_file) @@ -110,7 +110,7 @@ class Superstarify(InfractionScheduler, Cog): self, ctx: Context, member: Member, - duration: t.Optional[Expiry], + duration: t.Optional[Duration], *, reason: str = '', ) -> None: @@ -136,7 +136,7 @@ class Superstarify(InfractionScheduler, Cog): return # Set the duration to 1 hour if none was provided - duration = duration or datetime.datetime.utcnow() + datetime.timedelta(hours=1) + duration = duration or await Duration().convert(ctx, SUPERSTARIFY_DEFAULT_DURATION) # Post the infraction to the API old_nick = 
member.display_name -- cgit v1.2.3 From 03f909df6758a10c95f0b63df487f1acd97ec36d Mon Sep 17 00:00:00 2001 From: Vivaan Verma Date: Mon, 19 Apr 2021 21:15:11 +0100 Subject: Change type hint from duration to expiry --- bot/exts/moderation/infraction/superstarify.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 0bc2198c3..ef88fb43f 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -11,7 +11,7 @@ from discord.utils import escape_markdown from bot import constants from bot.bot import Bot -from bot.converters import Duration +from bot.converters import Duration, Expiry from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler from bot.utils.messages import format_user @@ -110,7 +110,7 @@ class Superstarify(InfractionScheduler, Cog): self, ctx: Context, member: Member, - duration: t.Optional[Duration], + duration: t.Optional[Expiry], *, reason: str = '', ) -> None: -- cgit v1.2.3 From 91bdf9415ec88715fadf2e0a56b900b376b638db Mon Sep 17 00:00:00 2001 From: Vivaan Verma <54081925+doublevcodes@users.noreply.github.com> Date: Mon, 19 Apr 2021 22:02:45 +0100 Subject: Update bot/exts/moderation/infraction/superstarify.py Co-authored-by: Boris Muratov <8bee278@gmail.com> --- bot/exts/moderation/infraction/superstarify.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index ef88fb43f..07e79b9fe 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -135,7 +135,7 @@ class Superstarify(InfractionScheduler, Cog): if await _utils.get_active_infraction(ctx, member, "superstar"): return - # Set the duration to 1 hour if none was provided + # Set to default 
duration if none was provided. duration = duration or await Duration().convert(ctx, SUPERSTARIFY_DEFAULT_DURATION) # Post the infraction to the API -- cgit v1.2.3 From 9aa2b42aa04724a4ebc74d3ff6c339c33547dce3 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 20 Apr 2021 17:20:44 +0200 Subject: Tests: AsyncMock is now in the standard library! The `tests/README.md` file still referenced our old custom `AsyncMock` that has been removed in favour of the standard library one that has been introduced in 3.8. This commit fixes this by updating the section. --- tests/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/README.md b/tests/README.md index 4f62edd68..092324123 100644 --- a/tests/README.md +++ b/tests/README.md @@ -114,7 +114,7 @@ class BotCogTests(unittest.TestCase): ### Mocking coroutines -By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. In anticipation of the `AsyncMock` that will be [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest), we have added an `AsyncMock` helper to [`helpers.py`](/tests/helpers.py). Do note that this drop-in replacement only implements an asynchronous `__call__` method, not the additional assertions that will come with the new `AsyncMock` type in Python 3.8. +By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. The [`AsyncMock`](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.AsyncMock) that has been [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest) is an asynchronous version of `MagicMock` that can be used anywhere a coroutine is expected. 
### Special mocks for some `discord.py` types -- cgit v1.2.3 From b12666dc4b75146b150c0812c5cb56f4317773ae Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Tue, 20 Apr 2021 18:48:12 +0300 Subject: Improve rediscache doc Co-authored-by: ChrisJL --- bot/exts/moderation/modpings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py index 690aa7c68..2f180e594 100644 --- a/bot/exts/moderation/modpings.py +++ b/bot/exts/moderation/modpings.py @@ -17,7 +17,7 @@ log = logging.getLogger(__name__) class ModPings(Cog): """Commands for a moderator to turn moderator pings on and off.""" - # RedisCache[str, str] + # RedisCache[discord.Member.id, 'Naïve ISO 8601 string'] # The cache's keys are mods who have pings off. # The cache's values are the times when the role should be re-applied to them, stored in ISO format. pings_off_mods = RedisCache() -- cgit v1.2.3 From 8a73d2b5e71444595b72155d7106c0fc48eeb027 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Tue, 20 Apr 2021 19:14:10 +0300 Subject: Remove allowed mentions in modlog alert The modlog alert embed no longer pings everyone. 
--- bot/exts/moderation/modlog.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index 5e8ea595b..e92f76c9a 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -127,8 +127,7 @@ class ModLog(Cog, name="ModLog"): log_message = await channel.send( content=content, embed=embed, - files=files, - allowed_mentions=discord.AllowedMentions(everyone=True) + files=files ) if additional_embeds: -- cgit v1.2.3 From c20f84ff95671527e6fbacb04f07bcee3baaafcd Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Tue, 20 Apr 2021 17:54:44 +0100 Subject: Add the Moderators role to moderation_roles in config This allows mod alert pings to go through in #mod-alerts, the allowed mentions only included the mods team role which is not pinged on mod alerts. --- config-default.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/config-default.yml b/config-default.yml index b19164d3f..b7c446889 100644 --- a/config-default.yml +++ b/config-default.yml @@ -275,6 +275,7 @@ guild: moderation_roles: - *ADMINS_ROLE - *MOD_TEAM_ROLE + - *MODS_ROLE - *OWNERS_ROLE staff_roles: -- cgit v1.2.3 From 1a65e2a0505c719a77ccf9b0832f44ac035c4f1c Mon Sep 17 00:00:00 2001 From: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> Date: Tue, 20 Apr 2021 18:07:16 -0400 Subject: chore: Use Embed.timestamp for showing when the reminder will be sent --- bot/exts/utils/reminders.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py index 3113a1149..1d0832d9a 100644 --- a/bot/exts/utils/reminders.py +++ b/bot/exts/utils/reminders.py @@ -90,15 +90,18 @@ class Reminders(Cog): delivery_dt: t.Optional[datetime], ) -> None: """Send an embed confirming the reminder change was made successfully.""" - embed = discord.Embed() - embed.colour = discord.Colour.green() - embed.title = random.choice(POSITIVE_REPLIES) - embed.description 
= on_success + embed = discord.Embed( + description=on_success, + colour=discord.Colour.green(), + title=random.choice(POSITIVE_REPLIES) + ) footer_str = f"ID: {reminder_id}" + if delivery_dt: # Reminder deletion will have a `None` `delivery_dt` - footer_str = f"{footer_str}, Due: {delivery_dt.strftime('%Y-%m-%dT%H:%M:%S')}" + footer_str += ', Done at' + embed.timestamp = delivery_dt embed.set_footer(text=footer_str) -- cgit v1.2.3 From 3188d61f9f6ef871864aed273844ff6a57eb36a0 Mon Sep 17 00:00:00 2001 From: ToxicKidz <78174417+ToxicKidz@users.noreply.github.com> Date: Wed, 21 Apr 2021 10:42:31 -0400 Subject: chore: Revert back to 'Due' --- bot/exts/utils/reminders.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py index 1d0832d9a..6c21920a1 100644 --- a/bot/exts/utils/reminders.py +++ b/bot/exts/utils/reminders.py @@ -100,7 +100,7 @@ class Reminders(Cog): if delivery_dt: # Reminder deletion will have a `None` `delivery_dt` - footer_str += ', Done at' + footer_str += ', Due' embed.timestamp = delivery_dt embed.set_footer(text=footer_str) -- cgit v1.2.3 From 1fdd5aabd4ef5e356f358fdb6e9b26a5b5da99ce Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Sat, 24 Apr 2021 17:04:48 +0200 Subject: Tests: simplify public flags handling Co_authored-by: Numerlor <25886452+Numerlor@users.noreply.github.com> --- tests/bot/exts/info/test_information.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index d2ecee033..770660fe3 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -281,13 +281,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """The embed should use the string representation of the user if they don't have a nick.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) user = helpers.MockMember() - public_flags 
= unittest.mock.MagicMock() - public_flags.__iter__.return_value = iter(()) - public_flags.verified_bot = False + user.public_flags = unittest.mock.MagicMock(verified_bot=False) user.nick = None user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") user.colour = 0 - user.public_flags = public_flags embed = await self.cog.create_user_embed(ctx, user) @@ -301,13 +298,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """The embed should use the nick if it's available.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) user = helpers.MockMember() - public_flags = unittest.mock.MagicMock() - public_flags.__iter__.return_value = iter(()) - public_flags.verified_bot = False + user.public_flags = unittest.mock.MagicMock(verified_bot=False) user.nick = "Cat lover" user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") user.colour = 0 - user.public_flags = public_flags embed = await self.cog.create_user_embed(ctx, user) -- cgit v1.2.3 From 9affdb92bb67794fd11732376ec64362da932817 Mon Sep 17 00:00:00 2001 From: rohan Date: Sun, 25 Apr 2021 21:05:09 +0530 Subject: Wait for cache to be loaded before accesing member voice state and channels. 
--- bot/exts/moderation/stream.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index a2ebb6205..ebcc00ace 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -72,6 +72,7 @@ class Stream(commands.Cog): async def _suspend_stream(self, ctx: commands.Context, member: discord.Member) -> None: """Suspend a member's stream.""" + await self.bot.wait_until_guild_available() voice_state = member.voice if not voice_state: -- cgit v1.2.3 From 3fa889aaee4a4d901ce17a24dd6760a4fea88fd7 Mon Sep 17 00:00:00 2001 From: Andi Qu <31325319+dolphingarlic@users.noreply.github.com> Date: Tue, 27 Apr 2021 08:39:51 +0200 Subject: Merge two comments into one Co-authored-by: Xithrius <15021300+Xithrius@users.noreply.github.com> --- bot/exts/info/code_snippets.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index b9e7cc3d0..c20115830 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -231,8 +231,7 @@ class CodeSnippets(Cog): snippet = await handler(**match.groupdict()) all_snippets.append((match.start(), snippet)) - # Sorts the list of snippets by their match index and joins them into - # a single message + # Sorts the list of snippets by their match index and joins them into a single message message_to_send = '\n'.join(map(lambda x: x[1], sorted(all_snippets))) if 0 < len(message_to_send) <= 2000 and len(all_snippets) <= 15: -- cgit v1.2.3 From 99549d7e76556c09d27148ee43fa61a38bc9a0b4 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 27 Apr 2021 16:58:41 +0200 Subject: Use a specific error message when a warned user isn't in the guild This commit changes sighly how the warn, kick and mute commands to take a fetched member as their argument and to return a little error message if the user isn't in the guild rather than showing the whole help page. 
--- bot/exts/moderation/infraction/infractions.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index d89e80acc..38d1ffc0e 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -54,8 +54,12 @@ class Infractions(InfractionScheduler, commands.Cog): # region: Permanent infractions @command() - async def warn(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None: + async def warn(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: """Warn a user for the given reason.""" + if not isinstance(user, Member): + await ctx.send(":x: The user doesn't appear to be on the server.") + return + infraction = await _utils.post_infraction(ctx, user, "warning", reason, active=False) if infraction is None: return @@ -63,8 +67,12 @@ class Infractions(InfractionScheduler, commands.Cog): await self.apply_infraction(ctx, infraction, user) @command() - async def kick(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None: + async def kick(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: """Kick a user for the given reason.""" + if not isinstance(user, Member): + await ctx.send(":x: The user doesn't appear to be on the server.") + return + await self.apply_kick(ctx, user, reason) @command() @@ -100,7 +108,7 @@ class Infractions(InfractionScheduler, commands.Cog): @command(aliases=["mute"]) async def tempmute( self, ctx: Context, - user: Member, + user: FetchedMember, duration: t.Optional[Expiry] = None, *, reason: t.Optional[str] = None @@ -122,6 +130,10 @@ class Infractions(InfractionScheduler, commands.Cog): If no duration is given, a one hour duration is used by default. 
""" + if not isinstance(user, Member): + await ctx.send(":x: The user doesn't appear to be on the server.") + return + if duration is None: duration = await Duration().convert(ctx, "1h") await self.apply_mute(ctx, user, reason, expires_at=duration) -- cgit v1.2.3 From 2edba253c93a9272f9a6a579981c7dfb9358f80c Mon Sep 17 00:00:00 2001 From: rohan Date: Wed, 28 Apr 2021 11:30:39 +0530 Subject: Use guild.afk_channel atr to retrieve afk Channel instance. --- bot/constants.py | 1 - bot/exts/moderation/stream.py | 2 +- config-default.yml | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/constants.py b/bot/constants.py index b9444c989..6d14bbb3a 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -444,7 +444,6 @@ class Channels(metaclass=YAMLGetter): mod_announcements: int staff_announcements: int - afk_voice: int admins_voice: int code_help_voice_1: int code_help_voice_2: int diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index ebcc00ace..1710d4c7c 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -82,7 +82,7 @@ class Stream(commands.Cog): if voice_state.self_stream: # End user's stream by moving them to AFK voice channel and back. original_vc = voice_state.channel - await member.move_to(self.bot.get_channel(Channels.afk_voice)) + await member.move_to(ctx.guild.afk_channel) await member.move_to(original_vc) # Notify. 
diff --git a/config-default.yml b/config-default.yml index 204397f7f..8c6e18470 100644 --- a/config-default.yml +++ b/config-default.yml @@ -206,7 +206,6 @@ guild: staff_announcements: &STAFF_ANNOUNCEMENTS 464033278631084042 # Voice Channels - afk_voice: 756327105389920306 admins_voice: &ADMINS_VOICE 500734494840717332 code_help_voice_1: 751592231726481530 code_help_voice_2: 764232549840846858 -- cgit v1.2.3 From 32b783f0b207450b46510a810a36999189b97985 Mon Sep 17 00:00:00 2001 From: rohan Date: Wed, 28 Apr 2021 12:37:12 +0530 Subject: Make flake8 happy :D --- bot/exts/moderation/stream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 1710d4c7c..fd856a7f4 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -9,7 +9,7 @@ from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Channels, Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission +from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission from bot.converters import Expiry from bot.pagination import LinePaginator from bot.utils.scheduling import Scheduler -- cgit v1.2.3