aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorGravatar D0rs4n <[email protected]>2021-08-02 19:13:03 +0200
committerGravatar D0rs4n <[email protected]>2021-08-02 19:13:03 +0200
commitc9f8efc6cca4556b04adca2894cdd1b61c87d421 (patch)
treec458804c335f45e79384260f253d98a76232fcce
parentMerge pull request #791 from onerandomusername/patch-1 (diff)
Handle Wikipedia API errors and warnings in the Wikipedia Cog
- Add an additional check to handle errors and warnings returned by the Wikipedia API, since the Wikipedia API responds with status code 200 even when an error has occurred.
-rw-r--r--bot/exts/evergreen/wikipedia.py55
1 file changed, 33 insertions, 22 deletions
diff --git a/bot/exts/evergreen/wikipedia.py b/bot/exts/evergreen/wikipedia.py
index 83937438..8af22737 100644
--- a/bot/exts/evergreen/wikipedia.py
+++ b/bot/exts/evergreen/wikipedia.py
@@ -39,31 +39,42 @@ class WikipediaSearch(commands.Cog):
async with self.bot.http_session.get(url=url) as resp:
if resp.status == 200:
raw_data = await resp.json()
- number_of_results = raw_data["query"]["searchinfo"]["totalhits"]
-
- if number_of_results:
- results = raw_data["query"]["search"]
- lines = []
-
- for article in results:
- line = WIKI_SEARCH_RESULT.format(
- name=article["title"],
- description=unescape(
- re.sub(
- WIKI_SNIPPET_REGEX, "", article["snippet"]
- )
- ),
- url=f"https://en.wikipedia.org/?curid={article['pageid']}"
- )
- lines.append(line)
-
- return lines
+ if raw_data.get("query", None) is None:
+ if raw_data.get("errors", None) is not None:
+ log.info("There was an error regarding the Wikipedia API query.")
+ else:
+ log.info("There was an issue when trying to communicate with the Wikipedia API")
- else:
await channel.send(
- "Sorry, we could not find a wikipedia article using that search term."
- )
+ "There was an issue processing your Wikipedia request, please try again later.")
return
+ else:
+
+ number_of_results = raw_data["query"]["searchinfo"]["totalhits"]
+
+ if number_of_results:
+ results = raw_data["query"]["search"]
+ lines = []
+
+ for article in results:
+ line = WIKI_SEARCH_RESULT.format(
+ name=article["title"],
+ description=unescape(
+ re.sub(
+ WIKI_SNIPPET_REGEX, "", article["snippet"]
+ )
+ ),
+ url=f"https://en.wikipedia.org/?curid={article['pageid']}"
+ )
+ lines.append(line)
+
+ return lines
+
+ else:
+ await channel.send(
+ "Sorry, we could not find a wikipedia article using that search term."
+ )
+ return
else:
log.info(f"Unexpected response `{resp.status}` while searching wikipedia for `{search}`")
await channel.send(