author     Hassan Abouelela <[email protected]>    2020-12-29 17:47:06 +0300
committer  Hassan Abouelela <[email protected]>    2020-12-29 17:47:06 +0300
commit  d7522eb4b194bd541bf77e54d7e3d75880c0a46f (patch)
tree    068d423911749ab16d7d3e79d23643d33c51ad3a
parent  Merge branch 'master' into voicechannel-mute (diff)
parent  Merge pull request #1016 from ks129/bug-fixes (diff)
Merge branch 'master' into voicechannel-mute
-rw-r--r--  bot/bot.py                                            20
-rw-r--r--  bot/constants.py                                       1
-rw-r--r--  bot/exts/info/pep.py                                 164
-rw-r--r--  bot/exts/info/reddit.py                                2
-rw-r--r--  bot/exts/moderation/infraction/_scheduler.py          34
-rw-r--r--  bot/exts/moderation/verification.py                   22
-rw-r--r--  bot/exts/moderation/watchchannels/_watchchannel.py    17
-rw-r--r--  bot/exts/utils/utils.py                              137
-rw-r--r--  config-default.yml                                     1

9 files changed, 238 insertions, 160 deletions
diff --git a/bot/bot.py b/bot/bot.py
index 4ebe0a5c3..d5f108575 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -3,7 +3,8 @@ import logging
import socket
import warnings
from collections import defaultdict
-from typing import Dict, Optional
+from contextlib import suppress
+from typing import Dict, List, Optional
import aiohttp
import discord
@@ -70,6 +71,9 @@ class Bot(commands.Bot):
attempt + 1
)
+ # All tasks that need to block closing until finished
+ self.closing_tasks: List[asyncio.Task] = []
+
async def cache_filter_list_data(self) -> None:
"""Cache all the data in the FilterList on the site."""
full_cache = await self.api_client.get('bot/filter-lists')
@@ -145,6 +149,20 @@ class Bot(commands.Bot):
async def close(self) -> None:
"""Close the Discord connection and the aiohttp session, connector, statsd client, and resolver."""
+ # Done before super().close() to allow tasks to finish before the HTTP session closes.
+ for ext in list(self.extensions):
+ with suppress(Exception):
+ self.unload_extension(ext)
+
+ for cog in list(self.cogs):
+ with suppress(Exception):
+ self.remove_cog(cog)
+
+ # Wait for all tasks that must complete before the bot closes.
+ log.trace("Waiting for tasks before closing.")
+ await asyncio.gather(*self.closing_tasks)
+
+ # Now do the actual full close of the bot.
await super().close()
if self.api_client:
diff --git a/bot/constants.py b/bot/constants.py
index f07677a92..d93b44b64 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -495,6 +495,7 @@ class Keys(metaclass=YAMLGetter):
section = "keys"
site_api: Optional[str]
+ github: Optional[str]
class URLs(metaclass=YAMLGetter):
diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py
new file mode 100644
index 000000000..8ac96bbdb
--- /dev/null
+++ b/bot/exts/info/pep.py
@@ -0,0 +1,164 @@
+import logging
+from datetime import datetime, timedelta
+from email.parser import HeaderParser
+from io import StringIO
+from typing import Dict, Optional, Tuple
+
+from discord import Colour, Embed
+from discord.ext.commands import Cog, Context, command
+
+from bot.bot import Bot
+from bot.constants import Keys
+from bot.utils.cache import AsyncCache
+
+log = logging.getLogger(__name__)
+
+ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
+BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
+PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master"
+
+pep_cache = AsyncCache()
+
+GITHUB_API_HEADERS = {}
+if Keys.github:
+ GITHUB_API_HEADERS["Authorization"] = f"token {Keys.github}"
+
+
+class PythonEnhancementProposals(Cog):
+ """Cog for displaying information about PEPs."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.peps: Dict[int, str] = {}
+ # To avoid situations where we don't have a last refresh datetime, set this to now.
+ self.last_refreshed_peps: datetime = datetime.now()
+ self.bot.loop.create_task(self.refresh_peps_urls())
+
+ async def refresh_peps_urls(self) -> None:
+ """Refresh PEP URLs listing in every 3 hours."""
+ # Wait until HTTP client is available
+ await self.bot.wait_until_ready()
+ log.trace("Started refreshing PEP URLs.")
+ self.last_refreshed_peps = datetime.now()
+
+ async with self.bot.http_session.get(
+ PEPS_LISTING_API_URL,
+ headers=GITHUB_API_HEADERS
+ ) as resp:
+ if resp.status != 200:
+ log.warning(f"Fetching PEP URLs from GitHub API failed with code {resp.status}")
+ return
+
+ listing = await resp.json()
+
+ log.trace("Got PEP URLs listing from GitHub API")
+
+ for file in listing:
+ name = file["name"]
+ if name.startswith("pep-") and name.endswith((".rst", ".txt")):
+ pep_number = name.replace("pep-", "").split(".")[0]
+ self.peps[int(pep_number)] = file["download_url"]
+
+ log.info("Successfully refreshed PEP URLs listing.")
+
+ @staticmethod
+ def get_pep_zero_embed() -> Embed:
+ """Get information embed about PEP 0."""
+ pep_embed = Embed(
+ title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
+ url="https://www.python.org/dev/peps/"
+ )
+ pep_embed.set_thumbnail(url=ICON_URL)
+ pep_embed.add_field(name="Status", value="Active")
+ pep_embed.add_field(name="Created", value="13-Jul-2000")
+ pep_embed.add_field(name="Type", value="Informational")
+
+ return pep_embed
+
+ async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]:
+ """Validate is PEP number valid. When it isn't, return error embed, otherwise None."""
+ if (
+ pep_nr not in self.peps
+ and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now()
+ and len(str(pep_nr)) < 5
+ ):
+ await self.refresh_peps_urls()
+
+ if pep_nr not in self.peps:
+ log.trace(f"PEP {pep_nr} was not found")
+ return Embed(
+ title="PEP not found",
+ description=f"PEP {pep_nr} does not exist.",
+ colour=Colour.red()
+ )
+
+ return None
+
+ def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed:
+ """Generate PEP embed based on PEP headers data."""
+ # Assemble the embed
+ pep_embed = Embed(
+ title=f"**PEP {pep_nr} - {pep_header['Title']}**",
+ description=f"[Link]({BASE_PEP_URL}{pep_nr:04})",
+ )
+
+ pep_embed.set_thumbnail(url=ICON_URL)
+
+ # Add the interesting information
+ fields_to_check = ("Status", "Python-Version", "Created", "Type")
+ for field in fields_to_check:
+ # Check for a PEP metadata field that is present but has an empty value
+ # embed field values can't contain an empty string
+ if pep_header.get(field, ""):
+ pep_embed.add_field(name=field, value=pep_header[field])
+
+ return pep_embed
+
+ @pep_cache(arg_offset=1)
+ async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]:
+ """Fetch, generate and return PEP embed. Second item of return tuple show does getting success."""
+ response = await self.bot.http_session.get(self.peps[pep_nr])
+
+ if response.status == 200:
+ log.trace(f"PEP {pep_nr} found")
+ pep_content = await response.text()
+
+ # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179
+ pep_header = HeaderParser().parse(StringIO(pep_content))
+ return self.generate_pep_embed(pep_header, pep_nr), True
+ else:
+ log.trace(
+ f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}."
+ )
+ return Embed(
+ title="Unexpected error",
+ description="Unexpected HTTP error during PEP search. Please let us know.",
+ colour=Colour.red()
+ ), False
+
+ @command(name='pep', aliases=('get_pep', 'p'))
+ async def pep_command(self, ctx: Context, pep_number: int) -> None:
+ """Fetches information about a PEP and sends it to the channel."""
+ # Trigger typing in chat to show users that bot is responding
+ await ctx.trigger_typing()
+
+ # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs.
+ if pep_number == 0:
+ pep_embed = self.get_pep_zero_embed()
+ success = True
+ else:
+ success = False
+ if not (pep_embed := await self.validate_pep_number(pep_number)):
+ pep_embed, success = await self.get_pep_embed(pep_number)
+
+ await ctx.send(embed=pep_embed)
+ if success:
+ log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.")
+ self.bot.stats.incr(f"pep_fetches.{pep_number}")
+ else:
+ log.trace(f"Getting PEP {pep_number} failed. Error embed sent.")
+
+
+def setup(bot: Bot) -> None:
+ """Load the PEP cog."""
+ bot.add_cog(PythonEnhancementProposals(bot))
diff --git a/bot/exts/info/reddit.py b/bot/exts/info/reddit.py
index bad4c504d..6790be762 100644
--- a/bot/exts/info/reddit.py
+++ b/bot/exts/info/reddit.py
@@ -45,7 +45,7 @@ class Reddit(Cog):
"""Stop the loop task and revoke the access token when the cog is unloaded."""
self.auto_poster_loop.cancel()
if self.access_token and self.access_token.expires_at > datetime.utcnow():
- asyncio.create_task(self.revoke_access_token())
+ self.bot.closing_tasks.append(asyncio.create_task(self.revoke_access_token()))
async def init_reddit_ready(self) -> None:
"""Sets the reddit webhook when the cog is loaded."""
diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py
index c062ae7f8..242b2d30f 100644
--- a/bot/exts/moderation/infraction/_scheduler.py
+++ b/bot/exts/moderation/infraction/_scheduler.py
@@ -74,8 +74,21 @@ class InfractionScheduler:
return
# Allowing mod log since this is a passive action that should be logged.
- await apply_coro
- log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.")
+ try:
+ await apply_coro
+ except discord.HTTPException as e:
+ # If the user joined and then left again before the action completed, the roles can't be applied.
+ if e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't reapply {infraction['type']} to user {infraction['user']} because user left the guild."
+ )
+ else:
+ log.exception(
+ f"Got unexpected HTTPException (HTTP {e.status}, Discord code {e.code})"
+ f"when awaiting {infraction['type']} coroutine for {infraction['user']}."
+ )
+ else:
+ log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.")
async def apply_infraction(
self,
@@ -178,6 +191,10 @@ class InfractionScheduler:
log_msg = f"Failed to apply {' '.join(infr_type.split('_'))} infraction #{id_} to {user}"
if isinstance(e, discord.Forbidden):
log.warning(f"{log_msg}: bot lacks permissions.")
+ elif e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't apply {infraction['type']} to user {infraction['user']} because user left from guild."
+ )
else:
log.exception(log_msg)
failed = True
@@ -352,9 +369,16 @@ class InfractionScheduler:
log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)"
log_content = mod_role.mention
except discord.HTTPException as e:
- log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
- log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}."
- log_content = mod_role.mention
+ if e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't pardon {infraction['type']} for user {infraction['user']} because user left the guild."
+ )
+ log_text["Failure"] = "User left the guild."
+ log_content = mod_role.mention
+ else:
+ log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
+ log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}."
+ log_content = mod_role.mention
# Check if the user is currently being watched by Big Brother.
try:
diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py
index 6239cf522..ce91dcb15 100644
--- a/bot/exts/moderation/verification.py
+++ b/bot/exts/moderation/verification.py
@@ -55,7 +55,7 @@ If you'd like to unsubscribe from the announcement notifications, simply send `!
"""
ALTERNATE_VERIFIED_MESSAGE = f"""
-Thanks for accepting our rules!
+You are now verified!
You can find a copy of our rules for reference at <https://pythondiscord.com/pages/rules>.
@@ -834,19 +834,21 @@ class Verification(Cog):
@command(name='verify')
@has_any_role(*constants.MODERATION_ROLES)
- async def apply_developer_role(self, ctx: Context, user: discord.Member) -> None:
- """Command for moderators to apply the Developer role to any user."""
+ async def perform_manual_verification(self, ctx: Context, user: discord.Member) -> None:
+ """Command for moderators to verify any user."""
log.trace(f'verify command called by {ctx.author} for {user.id}.')
- developer_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.verified)
- if developer_role in user.roles:
- log.trace(f'{user.id} is already a developer, aborting.')
- await ctx.send(f'{constants.Emojis.cross_mark} {user} is already a developer.')
+ if not user.pending:
+ log.trace(f'{user.id} is already verified, aborting.')
+ await ctx.send(f'{constants.Emojis.cross_mark} {user.mention} is already verified.')
return
- await user.add_roles(developer_role)
- log.trace(f'Developer role successfully applied to {user.id}')
- await ctx.send(f'{constants.Emojis.check_mark} Developer role applied to {user}.')
+ # Adding a role automatically verifies the user, so we add and remove the Announcements role.
+ temporary_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.announcements)
+ await user.add_roles(temporary_role)
+ await user.remove_roles(temporary_role)
+ log.trace(f'{user.id} manually verified.')
+ await ctx.send(f'{constants.Emojis.check_mark} {user.mention} is now verified.')
# endregion
diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py
index 7118dee02..f9fc12dc3 100644
--- a/bot/exts/moderation/watchchannels/_watchchannel.py
+++ b/bot/exts/moderation/watchchannels/_watchchannel.py
@@ -342,11 +342,14 @@ class WatchChannel(metaclass=CogABCMeta):
"""Takes care of unloading the cog and canceling the consumption task."""
self.log.trace("Unloading the cog")
if self._consume_task and not self._consume_task.done():
+ def done_callback(task: asyncio.Task) -> None:
+ """Send exception when consuming task have been cancelled."""
+ try:
+ task.result()
+ except asyncio.CancelledError:
+ self.log.info(
+ f"The consume task of {type(self).__name__} was canceled. Messages may be lost."
+ )
+
+ self._consume_task.add_done_callback(done_callback)
self._consume_task.cancel()
- try:
- self._consume_task.result()
- except asyncio.CancelledError as e:
- self.log.exception(
- "The consume task was canceled. Messages may be lost.",
- exc_info=e
- )
diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py
index 8e7e6ba36..eb92dfca7 100644
--- a/bot/exts/utils/utils.py
+++ b/bot/exts/utils/utils.py
@@ -2,10 +2,7 @@ import difflib
import logging
import re
import unicodedata
-from datetime import datetime, timedelta
-from email.parser import HeaderParser
-from io import StringIO
-from typing import Dict, Optional, Tuple, Union
+from typing import Tuple, Union
from discord import Colour, Embed, utils
from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role
@@ -17,7 +14,6 @@ from bot.converters import Snowflake
from bot.decorators import in_whitelist
from bot.pagination import LinePaginator
from bot.utils import messages
-from bot.utils.cache import AsyncCache
from bot.utils.time import time_since
log = logging.getLogger(__name__)
@@ -44,23 +40,12 @@ If the implementation is easy to explain, it may be a good idea.
Namespaces are one honking great idea -- let's do more of those!
"""
-ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
-
-pep_cache = AsyncCache()
-
class Utils(Cog):
"""A selection of utilities which don't have a clear category."""
- BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
- BASE_GITHUB_PEP_URL = "https://raw.githubusercontent.com/python/peps/master/pep-"
- PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master"
-
def __init__(self, bot: Bot):
self.bot = bot
- self.peps: Dict[int, str] = {}
- self.last_refreshed_peps: Optional[datetime] = None
- self.bot.loop.create_task(self.refresh_peps_urls())
@command()
@in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES)
@@ -207,126 +192,6 @@ class Utils(Cog):
for reaction in options:
await message.add_reaction(reaction)
- # region: PEP
-
- async def refresh_peps_urls(self) -> None:
- """Refresh PEP URLs listing in every 3 hours."""
- # Wait until HTTP client is available
- await self.bot.wait_until_ready()
- log.trace("Started refreshing PEP URLs.")
-
- async with self.bot.http_session.get(self.PEPS_LISTING_API_URL) as resp:
- listing = await resp.json()
-
- log.trace("Got PEP URLs listing from GitHub API")
-
- for file in listing:
- name = file["name"]
- if name.startswith("pep-") and name.endswith((".rst", ".txt")):
- pep_number = name.replace("pep-", "").split(".")[0]
- self.peps[int(pep_number)] = file["download_url"]
-
- self.last_refreshed_peps = datetime.now()
- log.info("Successfully refreshed PEP URLs listing.")
-
- @command(name='pep', aliases=('get_pep', 'p'))
- async def pep_command(self, ctx: Context, pep_number: int) -> None:
- """Fetches information about a PEP and sends it to the channel."""
- # Trigger typing in chat to show users that bot is responding
- await ctx.trigger_typing()
-
- # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs.
- if pep_number == 0:
- pep_embed = self.get_pep_zero_embed()
- success = True
- else:
- success = False
- if not (pep_embed := await self.validate_pep_number(pep_number)):
- pep_embed, success = await self.get_pep_embed(pep_number)
-
- await ctx.send(embed=pep_embed)
- if success:
- log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.")
- self.bot.stats.incr(f"pep_fetches.{pep_number}")
- else:
- log.trace(f"Getting PEP {pep_number} failed. Error embed sent.")
-
- @staticmethod
- def get_pep_zero_embed() -> Embed:
- """Get information embed about PEP 0."""
- pep_embed = Embed(
- title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
- url="https://www.python.org/dev/peps/"
- )
- pep_embed.set_thumbnail(url=ICON_URL)
- pep_embed.add_field(name="Status", value="Active")
- pep_embed.add_field(name="Created", value="13-Jul-2000")
- pep_embed.add_field(name="Type", value="Informational")
-
- return pep_embed
-
- async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]:
- """Validate is PEP number valid. When it isn't, return error embed, otherwise None."""
- if (
- pep_nr not in self.peps
- and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now()
- and len(str(pep_nr)) < 5
- ):
- await self.refresh_peps_urls()
-
- if pep_nr not in self.peps:
- log.trace(f"PEP {pep_nr} was not found")
- return Embed(
- title="PEP not found",
- description=f"PEP {pep_nr} does not exist.",
- colour=Colour.red()
- )
-
- return None
-
- def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed:
- """Generate PEP embed based on PEP headers data."""
- # Assemble the embed
- pep_embed = Embed(
- title=f"**PEP {pep_nr} - {pep_header['Title']}**",
- description=f"[Link]({self.BASE_PEP_URL}{pep_nr:04})",
- )
-
- pep_embed.set_thumbnail(url=ICON_URL)
-
- # Add the interesting information
- fields_to_check = ("Status", "Python-Version", "Created", "Type")
- for field in fields_to_check:
- # Check for a PEP metadata field that is present but has an empty value
- # embed field values can't contain an empty string
- if pep_header.get(field, ""):
- pep_embed.add_field(name=field, value=pep_header[field])
-
- return pep_embed
-
- @pep_cache(arg_offset=1)
- async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]:
- """Fetch, generate and return PEP embed. Second item of return tuple show does getting success."""
- response = await self.bot.http_session.get(self.peps[pep_nr])
-
- if response.status == 200:
- log.trace(f"PEP {pep_nr} found")
- pep_content = await response.text()
-
- # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179
- pep_header = HeaderParser().parse(StringIO(pep_content))
- return self.generate_pep_embed(pep_header, pep_nr), True
- else:
- log.trace(
- f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}."
- )
- return Embed(
- title="Unexpected error",
- description="Unexpected HTTP error during PEP search. Please let us know.",
- colour=Colour.red()
- ), False
- # endregion
-
def setup(bot: Bot) -> None:
"""Load the Utils cog."""
diff --git a/config-default.yml b/config-default.yml
index 3f3f66962..ca89bb639 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -323,6 +323,7 @@ filter:
keys:
site_api: !ENV "BOT_API_KEY"
+ github: !ENV "GITHUB_API_KEY"
urls: