 LICENSE-THIRD-PARTY                    |  30
 Pipfile                                |   4
 Pipfile.lock                           | 314
 bot/converters.py                      |  42
 bot/decorators.py                      |  12
 bot/exts/info/doc.py                   | 485
 bot/exts/info/doc/__init__.py          |  15
 bot/exts/info/doc/_batch_parser.py     | 203
 bot/exts/info/doc/_cog.py              | 425
 bot/exts/info/doc/_html.py             | 136
 bot/exts/info/doc/_inventory_parser.py | 126
 bot/exts/info/doc/_markdown.py         |  58
 bot/exts/info/doc/_parsing.py          | 247
 bot/exts/info/doc/_redis_cache.py      |  65
 bot/exts/info/source.py                |   3
 bot/utils/function.py                  |  72
 bot/utils/lock.py                      |  37
 bot/utils/messages.py                  |   4
 tests/bot/test_converters.py           |  21
 19 files changed, 1542 insertions(+), 757 deletions(-)
diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY
index eacd9b952..ab715630d 100644
--- a/LICENSE-THIRD-PARTY
+++ b/LICENSE-THIRD-PARTY
@@ -35,6 +35,36 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 ---------------------------------------------------------------------------------------------------
+                                        BSD 2-Clause License
+Applies to:
+  - Copyright (c) 2007-2020 by the Sphinx team (see AUTHORS file). All rights reserved.
+    - bot/cogs/doc/inventory_parser.py: _load_v1, _load_v2 and ZlibStreamReader.__aiter__.
+---------------------------------------------------------------------------------------------------
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+---------------------------------------------------------------------------------------------------
 
 PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
 Applies to:
 - Copyright © 2001-2020 Python Software Foundation. All rights reserved.
@@ -18,13 +18,11 @@ deepdiff = "~=4.0" feedparser = "~=5.2" fuzzywuzzy = "~=0.17" lxml = "~=4.4" -markdownify = "==0.5.3" +markdownify = "~=0.6.1" more_itertools = "~=8.2" python-dateutil = "~=2.8" pyyaml = "~=5.1" -requests = "~=2.22" sentry-sdk = "~=0.19" -sphinx = "~=2.2" statsd = "~=3.3" arrow = "~=0.17" emoji = "~=0.6" diff --git a/Pipfile.lock b/Pipfile.lock index 636d07b1a..5aff33383 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "26c8089f17d6d6bac11dbed366b1b46818b4546f243af756a106a32af5d9d8f6" + "sha256": "33874d325a918682da3ae4d833748263695836d0cda4c1b0627ce5a5f29746e5" }, "pipfile-spec": 6, "requires": { @@ -99,13 +99,6 @@ "markers": "python_version >= '3.6'", "version": "==3.3.1" }, - "alabaster": { - "hashes": [ - "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", - "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" - ], - "version": "==0.7.12" - }, "arrow": { "hashes": [ "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5", @@ -142,14 +135,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==20.3.0" }, - "babel": { - "hashes": [ - "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", - "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.9.0" - }, "beautifulsoup4": { "hashes": [ "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35", @@ -219,6 +204,7 @@ "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], + "index": "pypi", "markers": "sys_platform == 'win32'", "version": "==0.4.4" }, @@ -246,14 +232,6 @@ "index": "pypi", "version": "==1.6.0" }, - "docutils": { - "hashes": [ - "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", - "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.16" - }, "emoji": { "hashes": [ "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11" @@ -347,27 +325,11 @@ }, "idna": { "hashes": [ - "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", - "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16", + "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.10" - }, - "imagesize": { - "hashes": [ - "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", - "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.2.0" - }, - "jinja2": { - "hashes": [ - "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", - "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==2.11.2" + "markers": "python_version >= '3.4'", + "version": "==3.1" }, "lxml": { "hashes": [ @@ -414,50 +376,11 @@ }, "markdownify": { "hashes": [ - 
"sha256:30be8340724e706c9e811c27fe8c1542cf74a15b46827924fff5c54b40dd9b0d", - "sha256:a69588194fd76634f0139d6801b820fd652dc5eeba9530e90d323dfdc0155252" + "sha256:2147197d9c45cdd24d57302b94e01cac44988862960ac42eba730345a31aebbc", + "sha256:3de08764db001e7119cb06481de4ec0b2ea0338fd26cf49bdf16c4475ef44b81" ], "index": "pypi", - "version": "==0.5.3" - }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.1.1" + "version": "==0.6.3" }, "more-itertools": { "hashes": [ @@ -517,14 +440,6 @@ "markers": "python_version >= '3.5'", "version": "==4.0.2" }, - "packaging": { - "hashes": [ - "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", - "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.8" - }, "pamqp": { "hashes": [ 
"sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02", @@ -574,21 +489,14 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, - "pygments": { - "hashes": [ - "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435", - "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337" - ], - "markers": "python_version >= '3.5'", - "version": "==2.7.4" - }, - "pyparsing": { + "pyreadline": { "hashes": [ - "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", - "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1", + "sha256:65540c21bfe14405a3a77e4c085ecfce88724743a4ead47c66b84defcf82c32e", + "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.4.7" + "markers": "sys_platform == 'win32'", + "version": "==2.1" }, "python-dateutil": { "hashes": [ @@ -598,31 +506,32 @@ "index": "pypi", "version": "==2.8.1" }, - "pytz": { - "hashes": [ - "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", - "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5" - ], - "version": "==2020.5" - }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" - ], - "index": "pypi", - "version": "==5.3.1" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + 
"sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "index": "pypi", + "version": "==5.4.1" }, "redis": { "hashes": [ @@ -632,14 +541,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, - "requests": { - "hashes": [ - "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", - "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" - ], - "index": "pypi", - "version": "==2.25.1" - }, "sentry-sdk": { "hashes": [ "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0", @@ -653,16 +554,9 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, - "snowballstemmer": { - "hashes": [ - "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", - "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52" - ], - "version": "==2.0.0" - }, "sortedcontainers": { "hashes": [ "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", @@ -678,62 +572,6 @@ "markers": "python_version >= '3.0'", "version": "==2.1" }, - "sphinx": { - "hashes": [ - "sha256:b4c750d546ab6d7e05bdff6ac24db8ae3e8b8253a3569b754e445110a0a12b66", - "sha256:fc312670b56cb54920d6cc2ced455a22a547910de10b3142276495ced49231cb" - ], - "index": "pypi", - "version": "==2.4.4" - }, - "sphinxcontrib-applehelp": { - "hashes": [ - "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", - "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.2" - }, - "sphinxcontrib-devhelp": { - "hashes": [ - "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", - "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.2" - }, - "sphinxcontrib-htmlhelp": { - "hashes": [ - "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", - "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.3" - }, - "sphinxcontrib-jsmath": { - "hashes": [ - "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", - "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.1" - }, - "sphinxcontrib-qthelp": { - "hashes": [ - "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", - "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.3" - }, - "sphinxcontrib-serializinghtml": { - "hashes": [ - 
"sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", - "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" - ], - "markers": "python_version >= '3.5'", - "version": "==1.1.4" - }, "statsd": { "hashes": [ "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa", @@ -995,19 +833,19 @@ }, "identify": { "hashes": [ - "sha256:18994e850ba50c37bcaed4832be8b354d6a06c8fb31f54e0e7ece76d32f69bc8", - "sha256:892473bf12e655884132a3a32aca737a3cbefaa34a850ff52d501773a45837bc" + "sha256:70b638cf4743f33042bebb3b51e25261a0a10e80f978739f17e7fd4837664a66", + "sha256:9dfb63a2e871b807e3ba62f029813552a24b5289504f5b071dea9b041aee9fe4" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.5.12" + "version": "==1.5.13" }, "idna": { "hashes": [ - "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", - "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16", + "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.10" + "markers": "python_version >= '3.4'", + "version": "==3.1" }, "mccabe": { "hashes": [ @@ -1065,29 +903,37 @@ }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" - ], - "index": "pypi", - "version": "==5.3.1" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + 
"sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "index": "pypi", + "version": "==5.4.1" }, "requests": { "hashes": [ "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" ], - "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.25.1" }, "six": { @@ -1095,22 +941,22 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "snowballstemmer": { "hashes": [ - "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", - "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52" + "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2", + "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914" ], - "version": "==2.0.0" + "version": "==2.1.0" }, "toml": { "hashes": [ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "urllib3": { @@ -1123,11 +969,11 @@ }, "virtualenv": { "hashes": [ - "sha256:0c111a2236b191422b37fe8c28b8c828ced39aab4bf5627fa5c331aeffb570d9", - "sha256:14b34341e742bdca219e10708198e704e8a7064dd32f474fc16aca68ac53a306" + "sha256:219ee956e38b08e32d5639289aaa5bd190cfbe7dafcb8fa65407fca08e808f9c", + "sha256:227a8fed626f2f20a6cdb0870054989f82dd27b2560a911935ba905a2a5e0034" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.3.1" + "version": "==20.4.0" } } } diff --git a/bot/converters.py b/bot/converters.py index d0a9731d6..2b383636c 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -15,6 +15,7 @@ from discord.utils import DISCORD_EPOCH, snowflake_time from bot.api import ResponseCodeError from bot.constants import URLs +from bot.exts.info.doc import _inventory_parser from bot.utils.regex import INVITE_RE log = logging.getLogger(__name__) @@ -126,22 +127,20 @@ class ValidFilterListType(Converter): return list_type -class ValidPythonIdentifier(Converter): +class PackageName(Converter): """ - A converter that checks whether the given string is a valid Python identifier. + A converter that checks whether the given string is a valid package name. - This is used to have package names that correspond to how you would use the package in your - code, e.g. `import package`. - - Raises `BadArgument` if the argument is not a valid Python identifier, and simply passes through - the given argument otherwise. 
+    Package names are used for stats and are restricted to the characters a-z and _.
     """
 
-    @staticmethod
-    async def convert(ctx: Context, argument: str) -> str:
-        """Checks whether the given string is a valid Python identifier."""
-        if not argument.isidentifier():
-            raise BadArgument(f"`{argument}` is not a valid Python identifier")
+    PACKAGE_NAME_RE = re.compile(r"[^a-z_]")
+
+    @classmethod
+    async def convert(cls, ctx: Context, argument: str) -> str:
+        """Checks whether the given string is a valid package name."""
+        if cls.PACKAGE_NAME_RE.search(argument):
+            raise BadArgument("The provided package name is not valid; please only use the characters a-z and _.")
         return argument
 
 
@@ -177,6 +176,25 @@ class ValidURL(Converter):
         return url
 
 
+class Inventory(Converter):
+    """
+    Represents an Intersphinx inventory URL.
+
+    This converter checks whether intersphinx accepts the given inventory URL, and raises
+    `BadArgument` if that is not the case or if the URL is unreachable.
+
+    Otherwise, it returns the URL and the fetched inventory dict in a tuple.
+    """
+
+    @staticmethod
+    async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.InventoryDict]:
+        """Convert url to Intersphinx inventory URL."""
+        await ctx.trigger_typing()
+        if (inventory := await _inventory_parser.fetch_inventory(url)) is None:
+            raise BadArgument(f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS} attempts.")
+        return url, inventory
+
+
 class Snowflake(IDConverter):
     """
     Converts to an int if the argument is a valid Discord snowflake.
diff --git a/bot/decorators.py b/bot/decorators.py
index 063c8f878..02735d0dc 100644
--- a/bot/decorators.py
+++ b/bot/decorators.py
@@ -1,8 +1,8 @@
 import asyncio
 import logging
+import types
 import typing as t
 from contextlib import suppress
-from functools import wraps
 
 from discord import Member, NotFound
 from discord.ext import commands
@@ -11,6 +11,7 @@ from discord.ext.commands import Cog, Context
 from bot.constants import Channels, RedirectOutput
 from bot.utils import function
 from bot.utils.checks import in_whitelist_check
+from bot.utils.function import command_wraps
 
 log = logging.getLogger(__name__)
 
@@ -70,8 +71,8 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N
 
     This decorator must go before (below) the `command` decorator.
     """
-    def wrap(func: t.Callable) -> t.Callable:
-        @wraps(func)
+    def wrap(func: types.FunctionType) -> types.FunctionType:
+        @command_wraps(func)
         async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None:
             if ctx.channel.id == destination_channel:
                 log.trace(f"Command {ctx.command.name} was invoked in destination_channel, not redirecting")
@@ -105,7 +106,6 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N
             with suppress(NotFound):
                 await ctx.message.delete()
                 log.trace("Redirect output: Deleted invocation message")
-
         return inner
 
     return wrap
@@ -122,8 +122,8 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
 
     This decorator must go before (below) the `command` decorator.
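The `PackageName` converter above replaces `ValidPythonIdentifier` and only accepts names built from a-z and _. A minimal standalone sketch of the same check (hypothetical helper name, outside discord.py's converter machinery):

import re

# Same pattern as PackageName.PACKAGE_NAME_RE above: any character
# outside a-z and _ makes the name invalid.
PACKAGE_NAME_RE = re.compile(r"[^a-z_]")


def is_valid_package_name(name: str) -> bool:
    """Return True if `name` contains only the characters a-z and _."""
    return PACKAGE_NAME_RE.search(name) is None


assert is_valid_package_name("aiohttp")
assert is_valid_package_name("python_dateutil")
assert not is_valid_package_name("python-dateutil")  # hyphens are rejected
assert not is_valid_package_name("NumPy")            # so is uppercase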
""" - def decorator(func: t.Callable) -> t.Callable: - @wraps(func) + def decorator(func: types.FunctionType) -> types.FunctionType: + @command_wraps(func) async def wrapper(*args, **kwargs) -> None: log.trace(f"{func.__name__}: respect role hierarchy decorator called") diff --git a/bot/exts/info/doc.py b/bot/exts/info/doc.py deleted file mode 100644 index 9b5bd6504..000000000 --- a/bot/exts/info/doc.py +++ /dev/null @@ -1,485 +0,0 @@ -import asyncio -import functools -import logging -import re -import textwrap -from contextlib import suppress -from types import SimpleNamespace -from typing import Optional, Tuple - -import discord -from bs4 import BeautifulSoup -from bs4.element import PageElement, Tag -from discord.errors import NotFound -from discord.ext import commands -from markdownify import MarkdownConverter -from requests import ConnectTimeout, ConnectionError, HTTPError -from sphinx.ext import intersphinx -from urllib3.exceptions import ProtocolError - -from bot.bot import Bot -from bot.constants import MODERATION_ROLES, RedirectOutput -from bot.converters import ValidPythonIdentifier, ValidURL -from bot.pagination import LinePaginator -from bot.utils.cache import AsyncCache -from bot.utils.messages import wait_for_deletion - - -log = logging.getLogger(__name__) -logging.getLogger('urllib3').setLevel(logging.WARNING) - -# Since Intersphinx is intended to be used with Sphinx, -# we need to mock its configuration. -SPHINX_MOCK_APP = SimpleNamespace( - config=SimpleNamespace( - intersphinx_timeout=3, - tls_verify=True, - user_agent="python3:python-discord/bot:1.0.0" - ) -) - -NO_OVERRIDE_GROUPS = ( - "2to3fixer", - "token", - "label", - "pdbcommand", - "term", -) -NO_OVERRIDE_PACKAGES = ( - "python", -) - -SEARCH_END_TAG_ATTRS = ( - "data", - "function", - "class", - "exception", - "seealso", - "section", - "rubric", - "sphinxsidebar", -) -UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") -WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") - -FAILED_REQUEST_RETRY_AMOUNT = 3 -NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay - -symbol_cache = AsyncCache() - - -class DocMarkdownConverter(MarkdownConverter): - """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" - - def convert_code(self, el: PageElement, text: str) -> str: - """Undo `markdownify`s underscore escaping.""" - return f"`{text}`".replace('\\', '') - - def convert_pre(self, el: PageElement, text: str) -> str: - """Wrap any codeblocks in `py` for syntax highlighting.""" - code = ''.join(el.strings) - return f"```py\n{code}```" - - -def markdownify(html: str) -> DocMarkdownConverter: - """Create a DocMarkdownConverter object from the input html.""" - return DocMarkdownConverter(bullets='•').convert(html) - - -class InventoryURL(commands.Converter): - """ - Represents an Intersphinx inventory URL. - - This converter checks whether intersphinx accepts the given inventory URL, and raises - `BadArgument` if that is not the case. - - Otherwise, it simply passes through the given URL. - """ - - @staticmethod - async def convert(ctx: commands.Context, url: str) -> str: - """Convert url to Intersphinx inventory URL.""" - try: - intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url) - except AttributeError: - raise commands.BadArgument(f"Failed to fetch Intersphinx inventory from URL `{url}`.") - except ConnectionError: - if url.startswith('https'): - raise commands.BadArgument( - f"Cannot establish a connection to `{url}`. Does it support HTTPS?" 
- ) - raise commands.BadArgument(f"Cannot connect to host with URL `{url}`.") - except ValueError: - raise commands.BadArgument( - f"Failed to read Intersphinx inventory from URL `{url}`. " - "Are you sure that it's a valid inventory file?" - ) - return url - - -class Doc(commands.Cog): - """A set of commands for querying & displaying documentation.""" - - def __init__(self, bot: Bot): - self.base_urls = {} - self.bot = bot - self.inventories = {} - self.renamed_symbols = set() - - self.bot.loop.create_task(self.init_refresh_inventory()) - - async def init_refresh_inventory(self) -> None: - """Refresh documentation inventory on cog initialization.""" - await self.bot.wait_until_guild_available() - await self.refresh_inventory() - - async def update_single( - self, package_name: str, base_url: str, inventory_url: str - ) -> None: - """ - Rebuild the inventory for a single package. - - Where: - * `package_name` is the package name to use, appears in the log - * `base_url` is the root documentation URL for the specified package, used to build - absolute paths that link to specific symbols - * `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running - `intersphinx.fetch_inventory` in an executor on the bot's event loop - """ - self.base_urls[package_name] = base_url - - package = await self._fetch_inventory(inventory_url) - if not package: - return None - - for group, value in package.items(): - for symbol, (package_name, _version, relative_doc_url, _) in value.items(): - absolute_doc_url = base_url + relative_doc_url - - if symbol in self.inventories: - group_name = group.split(":")[1] - symbol_base_url = self.inventories[symbol].split("/", 3)[2] - if ( - group_name in NO_OVERRIDE_GROUPS - or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) - ): - - symbol = f"{group_name}.{symbol}" - # If renamed `symbol` already exists, add library name in front to differentiate between them. - if symbol in self.renamed_symbols: - # Split `package_name` because of packages like Pillow that have spaces in them. - symbol = f"{package_name.split()[0]}.{symbol}" - - self.inventories[symbol] = absolute_doc_url - self.renamed_symbols.add(symbol) - continue - - self.inventories[symbol] = absolute_doc_url - - log.trace(f"Fetched inventory for {package_name}.") - - async def refresh_inventory(self) -> None: - """Refresh internal documentation inventory.""" - log.debug("Refreshing documentation inventory...") - - # Clear the old base URLS and inventories to ensure - # that we start from a fresh local dataset. - # Also, reset the cache used for fetching documentation. - self.base_urls.clear() - self.inventories.clear() - self.renamed_symbols.clear() - symbol_cache.clear() - - # Run all coroutines concurrently - since each of them performs a HTTP - # request, this speeds up fetching the inventory data heavily. - coros = [ - self.update_single( - package["package"], package["base_url"], package["inventory_url"] - ) for package in await self.bot.api_client.get('bot/documentation-links') - ] - await asyncio.gather(*coros) - - async def get_symbol_html(self, symbol: str) -> Optional[Tuple[list, str]]: - """ - Given a Python symbol, return its signature and description. - - The first tuple element is the signature of the given symbol as a markup-free string, and - the second tuple element is the description of the given symbol with HTML markup included. - - If the given symbol is a module, returns a tuple `(None, str)` - else if the symbol could not be found, returns `None`. 
- """ - url = self.inventories.get(symbol) - if url is None: - return None - - async with self.bot.http_session.get(url) as response: - html = await response.text(encoding='utf-8') - - # Find the signature header and parse the relevant parts. - symbol_id = url.split('#')[-1] - soup = BeautifulSoup(html, 'lxml') - symbol_heading = soup.find(id=symbol_id) - search_html = str(soup) - - if symbol_heading is None: - return None - - if symbol_id == f"module-{symbol}": - # Get page content from the module headerlink to the - # first tag that has its class in `SEARCH_END_TAG_ATTRS` - start_tag = symbol_heading.find("a", attrs={"class": "headerlink"}) - if start_tag is None: - return [], "" - - end_tag = start_tag.find_next(self._match_end_tag) - if end_tag is None: - return [], "" - - description_start_index = search_html.find(str(start_tag.parent)) + len(str(start_tag.parent)) - description_end_index = search_html.find(str(end_tag)) - description = search_html[description_start_index:description_end_index] - signatures = None - - else: - signatures = [] - description = str(symbol_heading.find_next_sibling("dd")) - description_pos = search_html.find(description) - # Get text of up to 3 signatures, remove unwanted symbols - for element in [symbol_heading] + symbol_heading.find_next_siblings("dt", limit=2): - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) - if signature and search_html.find(str(element)) < description_pos: - signatures.append(signature) - - return signatures, description.replace('¶', '') - - @symbol_cache(arg_offset=1) - async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: - """ - Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. - - If the symbol is known, an Embed with documentation about it is returned. - """ - scraped_html = await self.get_symbol_html(symbol) - if scraped_html is None: - return None - - signatures = scraped_html[0] - permalink = self.inventories[symbol] - description = markdownify(scraped_html[1]) - - # Truncate the description of the embed to the last occurrence - # of a double newline (interpreted as a paragraph) before index 1000. - if len(description) > 1000: - shortened = description[:1000] - description_cutoff = shortened.rfind('\n\n', 100) - if description_cutoff == -1: - # Search the shortened version for cutoff points in decreasing desirability, - # cutoff at 1000 if none are found. - for string in (". ", ", ", ",", " "): - description_cutoff = shortened.rfind(string) - if description_cutoff != -1: - break - else: - description_cutoff = 1000 - description = description[:description_cutoff] - - # If there is an incomplete code block, cut it out - if description.count("```") % 2: - codeblock_start = description.rfind('```py') - description = description[:codeblock_start].rstrip() - description += f"... [read more]({permalink})" - - description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) - if signatures is None: - # If symbol is a module, don't show signature. - embed_description = description - - elif not signatures: - # It's some "meta-page", for example: - # https://docs.djangoproject.com/en/dev/ref/views/#module-django.views - embed_description = "This appears to be a generic page not tied to a specific symbol." 
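The deleted truncation logic above is worth reading closely: it cuts the embed description at the last paragraph break before 1,000 characters and falls back to progressively weaker break points. A standalone sketch of that cutoff search (hypothetical helper name; `permalink` is the symbol's docs URL):

def truncate_description(description: str, permalink: str, limit: int = 1000) -> str:
    """Cut `description` at the friendliest break point before `limit`."""
    if len(description) <= limit:
        return description

    shortened = description[:limit]
    # Prefer the last paragraph break past the first 100 characters.
    cutoff = shortened.rfind("\n\n", 100)
    if cutoff == -1:
        # Fall back to sentence, clause, and finally word boundaries.
        for candidate in (". ", ", ", ",", " "):
            cutoff = shortened.rfind(candidate)
            if cutoff != -1:
                break
        else:
            cutoff = limit

    truncated = description[:cutoff]
    # Drop a trailing, unterminated code block if the cut landed inside one.
    if truncated.count("```") % 2:
        truncated = truncated[:truncated.rfind("```py")].rstrip()
    return truncated + f"... [read more]({permalink})"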
- - else: - embed_description = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) - embed_description += f"\n{description}" - - embed = discord.Embed( - title=f'`{symbol}`', - url=permalink, - description=embed_description - ) - # Show all symbols with the same name that were renamed in the footer. - embed.set_footer( - text=", ".join(renamed for renamed in self.renamed_symbols - {symbol} if renamed.endswith(f".{symbol}")) - ) - return embed - - @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) - async def docs_group(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: - """Lookup documentation for Python symbols.""" - await self.get_command(ctx, symbol) - - @docs_group.command(name='get', aliases=('g',)) - async def get_command(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: - """ - Return a documentation embed for a given symbol. - - If no symbol is given, return a list of all available inventories. - - Examples: - !docs - !docs aiohttp - !docs aiohttp.ClientSession - !docs get aiohttp.ClientSession - """ - if symbol is None: - inventory_embed = discord.Embed( - title=f"All inventories (`{len(self.base_urls)}` total)", - colour=discord.Colour.blue() - ) - - lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items()) - if self.base_urls: - await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False) - - else: - inventory_embed.description = "Hmmm, seems like there's nothing here yet." - await ctx.send(embed=inventory_embed) - - else: - # Fetching documentation for a symbol (at least for the first time, since - # caching is used) takes quite some time, so let's send typing to indicate - # that we got the command, but are still working on it. - async with ctx.typing(): - doc_embed = await self.get_symbol_embed(symbol) - - if doc_embed is None: - error_embed = discord.Embed( - description=f"Sorry, I could not find any documentation for `{symbol}`.", - colour=discord.Colour.red() - ) - error_message = await ctx.send(embed=error_embed) - with suppress(NotFound): - await error_message.delete(delay=NOT_FOUND_DELETE_DELAY) - await ctx.message.delete(delay=NOT_FOUND_DELETE_DELAY) - else: - msg = await ctx.send(embed=doc_embed) - await wait_for_deletion(msg, (ctx.author.id,)) - - @docs_group.command(name='set', aliases=('s',)) - @commands.has_any_role(*MODERATION_ROLES) - async def set_command( - self, ctx: commands.Context, package_name: ValidPythonIdentifier, - base_url: ValidURL, inventory_url: InventoryURL - ) -> None: - """ - Adds a new documentation metadata object to the site's database. - - The database will update the object, should an existing item with the specified `package_name` already exist. - - Example: - !docs set \ - python \ - https://docs.python.org/3/ \ - https://docs.python.org/3/objects.inv - """ - body = { - 'package': package_name, - 'base_url': base_url, - 'inventory_url': inventory_url - } - await self.bot.api_client.post('bot/documentation-links', json=body) - - log.info( - f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" - f"Package name: {package_name}\n" - f"Base url: {base_url}\n" - f"Inventory URL: {inventory_url}" - ) - - # Rebuilding the inventory can take some time, so lets send out a - # typing event to show that the Bot is still working. 
- async with ctx.typing(): - await self.refresh_inventory() - await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") - - @docs_group.command(name='delete', aliases=('remove', 'rm', 'd')) - @commands.has_any_role(*MODERATION_ROLES) - async def delete_command(self, ctx: commands.Context, package_name: ValidPythonIdentifier) -> None: - """ - Removes the specified package from the database. - - Examples: - !docs delete aiohttp - """ - await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') - - async with ctx.typing(): - # Rebuild the inventory to ensure that everything - # that was from this package is properly deleted. - await self.refresh_inventory() - await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") - - @docs_group.command(name="refresh", aliases=("rfsh", "r")) - @commands.has_any_role(*MODERATION_ROLES) - async def refresh_command(self, ctx: commands.Context) -> None: - """Refresh inventories and send differences to channel.""" - old_inventories = set(self.base_urls) - with ctx.typing(): - await self.refresh_inventory() - # Get differences of added and removed inventories - added = ', '.join(inv for inv in self.base_urls if inv not in old_inventories) - if added: - added = f"+ {added}" - - removed = ', '.join(inv for inv in old_inventories if inv not in self.base_urls) - if removed: - removed = f"- {removed}" - - embed = discord.Embed( - title="Inventories refreshed", - description=f"```diff\n{added}\n{removed}```" if added or removed else "" - ) - await ctx.send(embed=embed) - - async def _fetch_inventory(self, inventory_url: str) -> Optional[dict]: - """Get and return inventory from `inventory_url`. If fetching fails, return None.""" - fetch_func = functools.partial(intersphinx.fetch_inventory, SPHINX_MOCK_APP, '', inventory_url) - for retry in range(1, FAILED_REQUEST_RETRY_AMOUNT+1): - try: - package = await self.bot.loop.run_in_executor(None, fetch_func) - except ConnectTimeout: - log.error( - f"Fetching of inventory {inventory_url} timed out," - f" trying again. ({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except ProtocolError: - log.error( - f"Connection lost while fetching inventory {inventory_url}," - f" trying again. 
({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except HTTPError as e: - log.error(f"Fetching of inventory {inventory_url} failed with status code {e.response.status_code}.") - return None - except ConnectionError: - log.error(f"Couldn't establish connection to inventory {inventory_url}.") - return None - else: - return package - log.error(f"Fetching of inventory {inventory_url} failed.") - return None - - @staticmethod - def _match_end_tag(tag: Tag) -> bool: - """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" - for attr in SEARCH_END_TAG_ATTRS: - if attr in tag.get("class", ()): - return True - - return tag.name == "table" - - -def setup(bot: Bot) -> None: - """Load the Doc cog.""" - bot.add_cog(Doc(bot)) diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py new file mode 100644 index 000000000..2bb43a950 --- /dev/null +++ b/bot/exts/info/doc/__init__.py @@ -0,0 +1,15 @@ +from bot.bot import Bot +from ._redis_cache import DocRedisCache + +MAX_SIGNATURE_AMOUNT = 3 +PRIORITY_PACKAGES = ( + "python", +) + +doc_cache = DocRedisCache(namespace="Docs") + + +def setup(bot: Bot) -> None: + """Load the Doc cog.""" + from ._cog import DocCog + bot.add_cog(DocCog(bot)) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py new file mode 100644 index 000000000..606c5d803 --- /dev/null +++ b/bot/exts/info/doc/_batch_parser.py @@ -0,0 +1,203 @@ +from __future__ import annotations + +import asyncio +import logging +import time +from collections import defaultdict +from contextlib import suppress +from functools import partial +from operator import attrgetter +from typing import Dict, List, NamedTuple, TYPE_CHECKING, Union + +import discord +from bs4 import BeautifulSoup + +import bot +from bot.constants import Channels +from . import doc_cache +from ._parsing import get_symbol_markdown +if TYPE_CHECKING: + from ._cog import DocItem + +log = logging.getLogger(__name__) + + +class StaleInventoryNotifier: + """Handle sending notifications about stale inventories through `DocItem`s to dev log.""" + + def __init__(self): + self._init_task = bot.instance.loop.create_task(self._init_channel()) + self._warned_urls = set() + + async def _init_channel(self) -> None: + """Wait for guild and get channel.""" + await bot.instance.wait_until_guild_available() + self._dev_log = bot.instance.get_channel(Channels.dev_log) + + async def send_warning(self, item: DocItem) -> None: + """Send a warning to dev log is one wasn't already sent for `item`'s url.""" + if item.url not in self._warned_urls: + self._warned_urls.add(item.url) + await self._init_task + embed = discord.Embed( + description=f"Doc item `{item.symbol_id=}` present in loaded documentation inventories " + f"not found on [site]({item.url}), inventories may need to be refreshed." + ) + await self._dev_log.send(embed=embed) + + +class QueueItem(NamedTuple): + """Contains a symbol and the BeautifulSoup object needed to parse it.""" + + symbol: DocItem + soup: BeautifulSoup + + def __eq__(self, other: Union[QueueItem, DocItem]): + if isinstance(other, type(self.symbol)): + return self.symbol == other + return NamedTuple.__eq__(self, other) + + +class ParseResultFuture(asyncio.Future): + """ + Future with metadata for the parser class. + + `user_requested` is set by the parser when a Future is requested by an user and moved to the front, + allowing the futures to only be waited for when clearing if they were user requested. 
+ + `result_set_time` provides the time at which the future's result has been set, + or -inf if the result hasn't been set yet + """ + + def __init__(self): + super().__init__() + self.user_requested = False + self.result_set_time = float("inf") + + def set_result(self, result: str, /) -> None: + """Set `self.result_set_time` to current time when the result is set.""" + self.result_set_time = time.time() + super().set_result(result) + + +class BatchParser: + """ + Get the Markdown of all symbols on a page and send them to redis when a symbol is requested. + + DocItems are added through the `add_item` method which adds them to the `_page_symbols` dict. + `get_markdown` is used to fetch the Markdown; when this is used for the first time on a page, + all of the symbols are queued to be parsed to avoid multiple web requests to the same page. + """ + + def __init__(self): + self._queue: List[QueueItem] = [] + self._page_symbols: Dict[str, List[DocItem]] = defaultdict(list) + self._item_futures: Dict[DocItem, ParseResultFuture] = defaultdict(ParseResultFuture) + self._parse_task = None + + self.cleanup_futures_task = bot.instance.loop.create_task(self._cleanup_futures()) + + self.stale_inventory_notifier = StaleInventoryNotifier() + + async def get_markdown(self, doc_item: DocItem) -> str: + """ + Get the result Markdown of `doc_item`. + + If no symbols were fetched from `doc_item`s page before, + the HTML has to be fetched and then all items from the page are put into the parse queue. + + Not safe to run while `self.clear` is running. + """ + self._item_futures[doc_item].user_requested = True + if (symbols_to_queue := self._page_symbols.get(doc_item.url)) is not None: + async with bot.instance.http_session.get(doc_item.url) as response: + soup = BeautifulSoup(await response.text(encoding="utf8"), "lxml") + + self._queue.extend(QueueItem(symbol, soup) for symbol in symbols_to_queue) + del self._page_symbols[doc_item.url] + log.debug(f"Added symbols from {doc_item.url} to parse queue.") + + if self._parse_task is None: + self._parse_task = asyncio.create_task(self._parse_queue()) + + with suppress(ValueError): + # If the item is not in the list then the item is already parsed or is being parsed + self._move_to_front(doc_item) + return await self._item_futures[doc_item] + + async def _parse_queue(self) -> None: + """ + Parse all item from the queue, setting their result markdown on the futures and sending them to redis. + + The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished. + """ + log.trace("Starting queue parsing.") + try: + while self._queue: + item, soup = self._queue.pop() + try: + if (future := self._item_futures[item]).done(): + # Some items are present in the inventories multiple times under different symbols, + # if we already parsed an equal item, we can just skip it. 
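Two details of the parser above are easy to miss: `QueueItem.__eq__` lets a bare `DocItem` be located in the queue via `list.index`, and items are popped from the tail of the list, so "moving to the front" is an append. A toy sketch of that queue discipline (strings standing in for `DocItem`s):

parse_queue = []  # the tail of the list is the "front" of the queue


def enqueue_page(symbols):
    """Queue every symbol of a freshly fetched page at once."""
    parse_queue.extend(symbols)


def move_to_front(symbol):
    """Prioritise an explicitly requested symbol."""
    parse_queue.remove(symbol)  # in the real queue, QueueItem.__eq__ makes this work with DocItems
    parse_queue.append(symbol)  # the next pop() will return it first


enqueue_page(["a", "b", "c"])
move_to_front("a")
assert parse_queue.pop() == "a"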
+ continue + + markdown = await bot.instance.loop.run_in_executor( + None, + partial(get_symbol_markdown, soup, item), + ) + if markdown is not None: + await doc_cache.set(item, markdown) + else: + asyncio.create_task(self.stale_inventory_notifier.send_warning(item)) + except Exception as e: + log.exception(f"Unexpected error when handling {item}") + future.set_exception(e) + else: + future.set_result(markdown) + await asyncio.sleep(0.1) + finally: + self._parse_task = None + log.trace("Finished parsing queue.") + + def _move_to_front(self, item: Union[QueueItem, DocItem]) -> None: + """Move `item` to the front of the parse queue.""" + # The parse queue stores soups along with the doc symbols in QueueItem objects, + # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. + item_index = self._queue.index(item) + queue_item = self._queue.pop(item_index) + + self._queue.append(queue_item) + log.trace(f"Moved {item} to the front of the queue.") + + def add_item(self, doc_item: DocItem) -> None: + """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" + self._page_symbols[doc_item.url].append(doc_item) + + async def clear(self) -> None: + """ + Clear all internal symbol data. + + All currently requested items are waited to be parsed before clearing. + """ + for future in filter(attrgetter("user_requested"), self._item_futures.values()): + await future + if self._parse_task is not None: + self._parse_task.cancel() + self._queue.clear() + self._page_symbols.clear() + self._item_futures.clear() + + async def _cleanup_futures(self) -> None: + """ + Clear old futures from internal results. + + After a future is set, we only need to wait for old requests to its associated `DocItem` to finish + as all new requests will get the value from the redis cache in the cog first. + Keeping them around for longer than a second is unnecessary and keeps the parsed Markdown strings alive. + """ + while True: + current_time = time.time() + for key, future in self._item_futures.copy().items(): + if current_time - future.result_set_time > 5: + del self._item_futures[key] + await asyncio.sleep(5) diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py new file mode 100644 index 000000000..26694ae55 --- /dev/null +++ b/bot/exts/info/doc/_cog.py @@ -0,0 +1,425 @@ +from __future__ import annotations + +import asyncio +import logging +import sys +import textwrap +from collections import defaultdict +from contextlib import suppress +from types import SimpleNamespace +from typing import Dict, NamedTuple, Optional + +import discord +from discord.ext import commands + +from bot.bot import Bot +from bot.constants import MODERATION_ROLES, RedirectOutput +from bot.converters import Inventory, PackageName, ValidURL +from bot.pagination import LinePaginator +from bot.utils.lock import SharedEvent, lock +from bot.utils.messages import send_denial, wait_for_deletion +from bot.utils.scheduling import Scheduler +from . 
import PRIORITY_PACKAGES, doc_cache +from ._batch_parser import BatchParser +from ._inventory_parser import InventoryDict, fetch_inventory + +log = logging.getLogger(__name__) + +# symbols with a group contained here will get the group prefixed on duplicates +FORCE_PREFIX_GROUPS = ( + "2to3fixer", + "token", + "label", + "pdbcommand", + "term", +) +NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay +# Delay to wait before trying to reach a rescheduled inventory again, in minutes +FETCH_RESCHEDULE_DELAY = SimpleNamespace(first=2, repeated=5) + +COMMAND_LOCK_SINGLETON = "inventory refresh" + + +class DocItem(NamedTuple): + """Holds inventory symbol information.""" + + package: str + group: str + base_url: str + relative_url_path: str + symbol_id: str + + @property + def url(self) -> str: + """Return the absolute url to the symbol.""" + return self.base_url + self.relative_url_path + + +class DocCog(commands.Cog): + """A set of commands for querying & displaying documentation.""" + + def __init__(self, bot: Bot): + self.base_urls = {} + self.bot = bot + self.doc_symbols: Dict[str, DocItem] = {} + self.item_fetcher = BatchParser() + self.renamed_symbols = defaultdict(list) + + self.inventory_scheduler = Scheduler(self.__class__.__name__) + self.inventory_reschedule_attempts = defaultdict(int) + + self.refresh_event = asyncio.Event() + self.refresh_event.set() + self.symbol_get_event = SharedEvent() + + self.init_refresh_task = self.bot.loop.create_task(self.init_refresh_inventory()) + + @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True) + async def init_refresh_inventory(self) -> None: + """Refresh documentation inventory on cog initialization.""" + await self.bot.wait_until_guild_available() + await self.refresh_inventory() + + def update_single(self, api_package_name: str, base_url: str, package: InventoryDict) -> None: + """ + Rebuild the inventory for a single package. + + Where: + * `package_name` is the package name to use, appears in the log + * `base_url` is the root documentation URL for the specified package, used to build + absolute paths that link to specific symbols + * `inventory_url` is the absolute URL to the intersphinx inventory. + """ + self.base_urls[api_package_name] = base_url + + for group, items in package.items(): + for symbol, relative_doc_url in items: + + # e.g. get 'class' from 'py:class' + group_name = group.split(":")[1] + if (original_symbol := self.doc_symbols.get(symbol)) is not None: + replaced_symbol_name = self.ensure_unique_symbol_name( + api_package_name, + group_name, + original_symbol, + symbol, + ) + if replaced_symbol_name is not None: + symbol = replaced_symbol_name + + relative_url_path, _, symbol_id = relative_doc_url.partition("#") + # Intern fields that have shared content so we're not storing unique strings for every object + symbol_item = DocItem( + api_package_name, + sys.intern(group_name), + base_url, + sys.intern(relative_url_path), + symbol_id + ) + self.doc_symbols[symbol] = symbol_item + self.item_fetcher.add_item(symbol_item) + + log.trace(f"Fetched inventory for {api_package_name}.") + + async def update_or_reschedule_inventory( + self, + api_package_name: str, + base_url: str, + inventory_url: str + ) -> None: + """ + Update the cog's inventory, or reschedule this method to execute again if the remote inventory unreachable. + + The first attempt is rescheduled to execute in `FETCH_RESCHEDULE_DELAY.first` minutes, the subsequent attempts + in `FETCH_RESCHEDULE_DELAY.repeated` minutes. 
+        """
+        package = await fetch_inventory(inventory_url)
+
+        if not package:
+            attempt = self.inventory_reschedule_attempts[api_package_name]
+            self.inventory_reschedule_attempts[api_package_name] += 1
+            if attempt == 0:
+                delay = FETCH_RESCHEDULE_DELAY.first
+            else:
+                delay = FETCH_RESCHEDULE_DELAY.repeated
+            log.info(f"Failed to fetch inventory; attempting again in {delay} minutes.")
+            self.inventory_scheduler.schedule_later(
+                delay*60,
+                (attempt, api_package_name),
+                self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url)
+            )
+        else:
+            self.update_single(api_package_name, base_url, package)
+
+    def ensure_unique_symbol_name(
+        self,
+        package_name: str,
+        group_name: str,
+        original_item: DocItem,
+        symbol_name: str
+    ) -> Optional[str]:
+        """
+        Ensure `symbol_name` doesn't overwrite another symbol in `doc_symbols`.
+
+        Should only be called with symbol names that already have a conflict in `doc_symbols`.
+
+        If None is returned, space was created for `symbol_name` in `doc_symbols` instead of
+        the symbol name being changed.
+        """
+        # Certain groups are added as prefixes to disambiguate the symbols.
+        if group_name in FORCE_PREFIX_GROUPS:
+            new_symbol = f"{group_name}.{symbol_name}"
+            if new_symbol in self.doc_symbols:
+                # If there's still a conflict, prefix with package name.
+                new_symbol = f"{package_name}.{new_symbol}"
+            self.renamed_symbols[symbol_name].append(new_symbol)
+            return new_symbol
+
+        # The existing symbol with which the current symbol conflicts should have a group prefix.
+        # It currently doesn't have the group prefix because it's only added once there's a conflict.
+        elif (original_symbol_group := original_item.group) in FORCE_PREFIX_GROUPS:
+            overridden_symbol = f"{original_symbol_group}.{symbol_name}"
+            if overridden_symbol in self.doc_symbols:
+                # If there's still a conflict, prefix with package name.
+                overridden_symbol = f"{original_item.package}.{overridden_symbol}"
+
+            self.doc_symbols[overridden_symbol] = original_item
+            self.renamed_symbols[symbol_name].append(overridden_symbol)
+
+        elif package_name in PRIORITY_PACKAGES:
+            overridden_symbol = f"{original_item.package}.{symbol_name}"
+            if overridden_symbol in self.doc_symbols:
+                # If there's still a conflict, add the symbol's group in the middle.
+                overridden_symbol = f"{original_item.package}.{original_item.group}.{symbol_name}"
+
+            self.doc_symbols[overridden_symbol] = original_item
+            self.renamed_symbols[symbol_name].append(overridden_symbol)
+
+        # If we can't specially handle the symbol through its group or package,
+        # fall back to prepending its package name to the front.
+        else:
+            new_symbol = f"{package_name}.{symbol_name}"
+            if new_symbol in self.doc_symbols:
+                # If there's still a conflict, add the symbol's group in the middle.
+                new_symbol = f"{package_name}.{group_name}.{symbol_name}"
+            self.renamed_symbols[symbol_name].append(new_symbol)
+            return new_symbol
+
+    async def refresh_inventory(self) -> None:
+        """Refresh internal documentation inventory."""
+        self.refresh_event.clear()
+        await self.symbol_get_event.wait()
+        log.debug("Refreshing documentation inventory...")
+        self.inventory_scheduler.cancel_all()
+        self.inventory_reschedule_attempts.clear()
+
+        # Clear the old base URLs and doc symbols to ensure
+        # that we start from a fresh local dataset.
+        # Also, reset the cache used for fetching documentation.
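The branching in `ensure_unique_symbol_name` above reads more easily as the ordered list of candidate names tried for an incoming conflicting symbol; a condensed restatement (hypothetical helper — the middle branches, which rename the already-stored symbol instead, follow the same pattern):

FORCE_PREFIX_GROUPS = ("2to3fixer", "token", "label", "pdbcommand", "term")


def rename_candidates(package: str, group: str, symbol: str) -> list:
    """Names tried, in order, when `symbol` already exists in doc_symbols."""
    if group in FORCE_PREFIX_GROUPS:
        # e.g. "term.list" first, then "python.term.list" on a second clash.
        return [f"{group}.{symbol}", f"{package}.{group}.{symbol}"]
    # Otherwise prefix the package, adding the group if that name is taken too.
    return [f"{package}.{symbol}", f"{package}.{group}.{symbol}"]


assert rename_candidates("python", "label", "comparisons") == [
    "label.comparisons",
    "python.label.comparisons",
]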
+        self.base_urls.clear()
+        self.doc_symbols.clear()
+        self.renamed_symbols.clear()
+        await self.item_fetcher.clear()
+
+        # Run all coroutines concurrently - since each of them performs an HTTP
+        # request, this significantly speeds up fetching the inventory data.
+        coros = [
+            self.update_or_reschedule_inventory(
+                package["package"], package["base_url"], package["inventory_url"]
+            ) for package in await self.bot.api_client.get('bot/documentation-links')
+        ]
+        await asyncio.gather(*coros)
+        log.debug("Finished inventory refresh.")
+        self.refresh_event.set()
+
+    async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]:
+        """
+        Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents.
+
+        If the symbol is known, an Embed with documentation about it is returned.
+
+        First check the DocRedisCache before querying the cog's `BatchParser`.
+        """
+        log.trace(f"Building embed for symbol `{symbol}`")
+        if not self.refresh_event.is_set():
+            log.debug("Waiting for inventories to be refreshed before processing item.")
+            await self.refresh_event.wait()
+
+        symbol_info = self.doc_symbols.get(symbol)
+        if symbol_info is None:
+            log.debug("Symbol does not exist.")
+            return None
+        self.bot.stats.incr(f"doc_fetches.{symbol_info.package}")
+
+        with self.symbol_get_event:
+            markdown = await doc_cache.get(symbol_info)
+
+            if markdown is None:
+                log.debug(f"Redis cache miss for symbol `{symbol}`.")
+                markdown = await self.item_fetcher.get_markdown(symbol_info)
+                if markdown is not None:
+                    await doc_cache.set(symbol_info, markdown)
+                else:
+                    markdown = "Unable to parse the requested symbol."
+
+        embed = discord.Embed(
+            title=discord.utils.escape_markdown(symbol),
+            url=f"{symbol_info.url}#{symbol_info.symbol_id}",
+            description=markdown
+        )
+        # Show all symbols with the same name that were renamed in the footer,
+        # with a max of 100 chars.
+        if symbol in self.renamed_symbols:
+            renamed_symbols = ', '.join(self.renamed_symbols[symbol])
+            footer_text = f"Moved: {textwrap.shorten(renamed_symbols, 100-7, placeholder=' ...')}"
+        else:
+            footer_text = ""
+        embed.set_footer(text=footer_text)
+        return embed
+
+    @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True)
+    async def docs_group(self, ctx: commands.Context, *, symbol: Optional[str]) -> None:
+        """Look up documentation for Python symbols."""
+        await self.get_command(ctx, symbol=symbol)
+
+    @docs_group.command(name='getdoc', aliases=('g',))
+    async def get_command(self, ctx: commands.Context, *, symbol: Optional[str]) -> None:
+        """
+        Return a documentation embed for a given symbol.
+
+        If no symbol is given, return a list of all available inventories.
+
+        Examples:
+            !docs
+            !docs aiohttp
+            !docs aiohttp.ClientSession
+            !docs getdoc aiohttp.ClientSession
+        """
+        if not symbol:
+            inventory_embed = discord.Embed(
+                title=f"All inventories (`{len(self.base_urls)}` total)",
+                colour=discord.Colour.blue()
+            )
+
+            lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items())
+            if self.base_urls:
+                await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False)
+
+            else:
+                inventory_embed.description = "Hmmm, seems like there's nothing here yet."
+                await ctx.send(embed=inventory_embed)
+
+        else:
+            symbol = symbol.strip("`")
+            # Fetching documentation for a symbol (at least for the first time, since
+            # caching is used) takes quite some time, so let's send typing to indicate
+            # that we got the command, but are still working on it.
+            async with ctx.typing():
+                doc_embed = await self.get_symbol_embed(symbol)
+
+            if doc_embed is None:
+                error_message = await send_denial(ctx, "No documentation found for the requested symbol.")
+                await wait_for_deletion(error_message, (ctx.author.id,), timeout=NOT_FOUND_DELETE_DELAY)
+                with suppress(discord.NotFound):
+                    await ctx.message.delete()
+                with suppress(discord.NotFound):
+                    await error_message.delete()
+            else:
+                msg = await ctx.send(embed=doc_embed)
+                await wait_for_deletion(msg, (ctx.author.id,))
+
+    @docs_group.command(name='setdoc', aliases=('s',))
+    @commands.has_any_role(*MODERATION_ROLES)
+    @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True)
+    async def set_command(
+        self,
+        ctx: commands.Context,
+        package_name: PackageName,
+        base_url: ValidURL,
+        inventory: Inventory,
+    ) -> None:
+        """
+        Add a new documentation metadata object to the site's database.
+
+        If an item with the specified `package_name` already exists, the database will update that object instead.
+
+        Example:
+            !docs setdoc \
+                    python \
+                    https://docs.python.org/3/ \
+                    https://docs.python.org/3/objects.inv
+        """
+        inventory_url, inventory_dict = inventory
+        body = {
+            'package': package_name,
+            'base_url': base_url,
+            'inventory_url': inventory_url
+        }
+        await self.bot.api_client.post('bot/documentation-links', json=body)
+
+        log.info(
+            f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n"
+            + "\n".join(f"{key}: {value}" for key, value in body.items())
+        )
+
+        self.update_single(package_name, base_url, inventory_dict)
+        await ctx.send(f"Added the package `{package_name}` to the database and refreshed the inventory.")
+
+    @docs_group.command(name='deletedoc', aliases=('removedoc', 'rm', 'd'))
+    @commands.has_any_role(*MODERATION_ROLES)
+    @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True)
+    async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None:
+        """
+        Remove the specified package from the database.
+
+        Example:
+            !docs deletedoc aiohttp
+        """
+        await self.bot.api_client.delete(f'bot/documentation-links/{package_name}')
+
+        async with ctx.typing():
+            # Rebuild the inventory to ensure that everything
+            # that was from this package is properly deleted.
+            await self.refresh_inventory()
+        await doc_cache.delete(package_name)
+        await ctx.send(f"Successfully deleted `{package_name}` and refreshed the inventory.")
+
+    @docs_group.command(name="refreshdoc", aliases=("rfsh", "r"))
+    @commands.has_any_role(*MODERATION_ROLES)
+    @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True)
+    async def refresh_command(self, ctx: commands.Context) -> None:
+        """Refresh inventories and show the difference."""
+        old_inventories = set(self.base_urls)
+        async with ctx.typing():
+            await self.refresh_inventory()
+        new_inventories = set(self.base_urls)
+
+        if added := ", ".join(new_inventories - old_inventories):
+            added = "+ " + added
+
+        if removed := ", ".join(old_inventories - new_inventories):
+            removed = "- " + removed
+
+        embed = discord.Embed(
+            title="Inventories refreshed",
+            description=f"```diff\n{added}\n{removed}```" if added or removed else ""
+        )
+        await ctx.send(embed=embed)
+
+    @docs_group.command(name="cleardoccache")
+    @commands.has_any_role(*MODERATION_ROLES)
+    @lock("doc", COMMAND_LOCK_SINGLETON, raise_error=True)
+    async def clear_cache_command(self, ctx: commands.Context, package_name: PackageName) -> None:
+        """Clear the persistent redis cache for `package_name`."""
+        if await doc_cache.delete(package_name):
+            await self.refresh_inventory()
+            await ctx.send(f"Successfully cleared the cache for `{package_name}` and refreshed the inventories.")
+        else:
+            await ctx.send("No keys matching the package found.")
+
+    def cog_unload(self) -> None:
+        """Clear scheduled inventories, queued symbols and cleanup task on cog unload."""
+        self.inventory_scheduler.cancel_all()
+        self.item_fetcher.cleanup_futures_task.cancel()
+        self.init_refresh_task.cancel()
+        asyncio.create_task(self.item_fetcher.clear())
diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py
new file mode 100644
index 000000000..f9fe542ce
--- /dev/null
+++ b/bot/exts/info/doc/_html.py
@@ -0,0 +1,136 @@
+import logging
+import re
+from functools import partial
+from typing import Callable, Container, Iterable, List, Union
+
+from bs4 import BeautifulSoup
+from bs4.element import NavigableString, PageElement, SoupStrainer, Tag
+
+from . import MAX_SIGNATURE_AMOUNT
+
+log = logging.getLogger(__name__)
+
+_UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶")
+_SEARCH_END_TAG_ATTRS = (
+    "data",
+    "function",
+    "class",
+    "exception",
+    "seealso",
+    "section",
+    "rubric",
+    "sphinxsidebar",
+)
+
+
+class Strainer(SoupStrainer):
+    """Subclass of SoupStrainer to allow matching of both `Tag`s and `NavigableString`s."""
+
+    def __init__(self, *, include_strings: bool, **kwargs):
+        self.include_strings = include_strings
+        passed_text = kwargs.pop("text", None)
+        if passed_text is not None:
+            log.warning("`text` is not a supported kwarg in the custom strainer.")
+        super().__init__(**kwargs)
+
+    markup_hint = Union[PageElement, List["markup_hint"]]
+
+    def search(self, markup: markup_hint) -> Union[PageElement, str]:
+        """Extend default SoupStrainer behaviour to allow matching both `Tag`s and `NavigableString`s."""
+        if isinstance(markup, str):
+            # Let everything through the text filter if we're including strings and tags.
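+            # When a name or attrs filter is set, strings never match and fall through to return None.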
+            if not self.name and not self.attrs and self.include_strings:
+                return markup
+        else:
+            return super().search(markup)
+
+
+def _find_elements_until_tag(
+    start_element: PageElement,
+    end_tag_filter: Union[Container[str], Callable[[Tag], bool]],
+    *,
+    func: Callable,
+    include_strings: bool = False,
+    limit: int = None,
+) -> List[Union[Tag, NavigableString]]:
+    """
+    Get all elements up to `limit` or until a tag matching `end_tag_filter` is found.
+
+    `end_tag_filter` can be either a container of string names to check against,
+    or a filtering callable that's applied to tags.
+
+    When `include_strings` is True, `NavigableString`s from the document will be included in the result along with `Tag`s.
+
+    `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`.
+    The method is then iterated over and all elements until the matching tag or the limit are added to the return list.
+    """
+    use_container_filter = not callable(end_tag_filter)
+    elements = []
+
+    for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit):
+        if isinstance(element, Tag):
+            if use_container_filter:
+                if element.name in end_tag_filter:
+                    break
+            elif end_tag_filter(element):
+                break
+        elements.append(element)
+
+    return elements
+
+
+_find_next_children_until_tag = partial(_find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False))
+_find_recursive_children_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_all)
+_find_next_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_next_siblings)
+_find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings)
+
+
+def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]:
+    """Create a callable that returns True when the passed-in tag's class is in `class_names` or when it is a table."""
+    def match_tag(tag: Tag) -> bool:
+        for attr in class_names:
+            if attr in tag.get("class", ()):
+                return True
+        return tag.name == "table"
+
+    return match_tag
+
+
+def get_general_description(start_element: Tag) -> List[Union[Tag, NavigableString]]:
+    """
+    Get page content up to a table or a tag with its class in `_SEARCH_END_TAG_ATTRS`.
+
+    An attempt is made to find a headerlink tag to skip repeating the symbol information in the description;
+    if one is found, its parent is used as the tag to start the search from instead of `start_element`.
+    """
+    child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100)
+    header = next(filter(_class_filter_factory(["headerlink"]), child_tags), None)
+    start_tag = header.parent if header is not None else start_element
+    return _find_next_siblings_until_tag(start_tag, _class_filter_factory(_SEARCH_END_TAG_ATTRS), include_strings=True)
+
+
+def get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString]]:
+    """Get the contents of the next dd tag, up to a dt or a dl tag."""
+    description_tag = symbol.find_next("dd")
+    return _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True)
+
+
+def get_signatures(start_signature: PageElement) -> List[str]:
+    """
+    Collect up to `MAX_SIGNATURE_AMOUNT` signatures from dt tags around the `start_signature` dt tag.
+
+    First the signatures under the `start_signature` are included;
+    if fewer than 2 are found, tags above the start signature are added to the result if any are present.
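+    Signatures following `start_signature` take priority; preceding dt tags only pad the result
+    when there aren't enough following ones to reach the maximum amount.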
+    """
+    signatures = []
+    for element in (
+        *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)),
+        start_signature,
+        *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2),
+    )[-MAX_SIGNATURE_AMOUNT:]:
+        signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text)
+
+        if signature:
+            signatures.append(signature)
+
+    return signatures
diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py
new file mode 100644
index 000000000..1615f15bd
--- /dev/null
+++ b/bot/exts/info/doc/_inventory_parser.py
@@ -0,0 +1,126 @@
+import logging
+import re
+import zlib
+from collections import defaultdict
+from typing import AsyncIterator, DefaultDict, List, Optional, Tuple
+
+import aiohttp
+
+import bot
+
+log = logging.getLogger(__name__)
+
+FAILED_REQUEST_ATTEMPTS = 3
+_V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)')
+
+InventoryDict = DefaultDict[str, List[Tuple[str, str]]]
+
+
+class ZlibStreamReader:
+    """Class used for decoding the zlib data of a stream line by line."""
+
+    READ_CHUNK_SIZE = 16 * 1024
+
+    def __init__(self, stream: aiohttp.StreamReader) -> None:
+        self.stream = stream
+
+    async def _read_compressed_chunks(self) -> AsyncIterator[bytes]:
+        """Read zlib data in `READ_CHUNK_SIZE` sized chunks and decompress."""
+        decompressor = zlib.decompressobj()
+        async for chunk in self.stream.iter_chunked(self.READ_CHUNK_SIZE):
+            yield decompressor.decompress(chunk)
+
+        yield decompressor.flush()
+
+    async def __aiter__(self) -> AsyncIterator[str]:
+        """Yield lines of decompressed text."""
+        buf = b''
+        async for chunk in self._read_compressed_chunks():
+            buf += chunk
+            pos = buf.find(b'\n')
+            while pos != -1:
+                yield buf[:pos].decode()
+                buf = buf[pos + 1:]
+                pos = buf.find(b'\n')
+
+
+async def _load_v1(stream: aiohttp.StreamReader) -> InventoryDict:
+    invdata = defaultdict(list)
+
+    async for line in stream:
+        name, type_, location = line.decode().rstrip().split(maxsplit=2)
+        # Version 1 did not add anchors to the location.
+        if type_ == 'mod':
+            type_ = 'py:module'
+            location += '#module-' + name
+        else:
+            type_ = 'py:' + type_
+            location += '#' + name
+        invdata[type_].append((name, location))
+    return invdata
+
+
+async def _load_v2(stream: aiohttp.StreamReader) -> InventoryDict:
+    invdata = defaultdict(list)
+
+    async for line in ZlibStreamReader(stream):
+        m = _V2_LINE_RE.match(line.rstrip())
+        name, type_, _prio, location, _dispname = m.groups()  # ignore the parsed items we don't need
+        if location.endswith('$'):
+            location = location[:-1] + name
+
+        invdata[type_].append((name, location))
+    return invdata
+
+
+async def _fetch_inventory(url: str) -> InventoryDict:
+    """Fetch, parse and return an intersphinx inventory file from a URL."""
+    timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5)
+    async with bot.instance.http_session.get(url, timeout=timeout, raise_for_status=True) as response:
+        stream = response.content
+
+        inventory_header = (await stream.readline()).decode().rstrip()
+        inventory_version = int(inventory_header[-1:])
+        await stream.readline()  # skip project name
+        await stream.readline()  # skip project version
+
+        if inventory_version == 1:
+            return await _load_v1(stream)
+
+        elif inventory_version == 2:
+            if b"zlib" not in await stream.readline():
+                raise ValueError(f"Invalid inventory file at url {url}.")
+            return await _load_v2(stream)
+
+        raise ValueError(f"Invalid inventory file at url {url}.")
+
+
+async def fetch_inventory(url: str) -> Optional[InventoryDict]:
+    """
+    Get an inventory dict from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors.
+
+    `url` should point at a valid sphinx objects.inv inventory file, which will be parsed into the
+    inventory dict in the format of {"domain:role": [("symbol_name", "relative_url_to_symbol"), ...], ...}
+    """
+    for attempt in range(1, FAILED_REQUEST_ATTEMPTS + 1):
+        try:
+            inventory = await _fetch_inventory(url)
+        except aiohttp.ClientConnectorError:
+            log.warning(
+                f"Failed to connect to inventory url at {url}; "
+                f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+            )
+        except aiohttp.ClientError:
+            log.error(
+                f"Failed to get inventory from {url}; "
+                f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+            )
+        except Exception:
+            log.exception(
+                f"An unexpected error has occurred during fetching of {url}; "
+                f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+            )
+        else:
+            return inventory
+
+    return None
diff --git a/bot/exts/info/doc/_markdown.py b/bot/exts/info/doc/_markdown.py
new file mode 100644
index 000000000..1b7d8232b
--- /dev/null
+++ b/bot/exts/info/doc/_markdown.py
@@ -0,0 +1,58 @@
+from urllib.parse import urljoin
+
+from bs4.element import PageElement
+from markdownify import MarkdownConverter
+
+
+class DocMarkdownConverter(MarkdownConverter):
+    """Subclass markdownify's MarkdownConverter to provide custom conversion methods."""
+
+    def __init__(self, *, page_url: str, **options):
+        super().__init__(**options)
+        self.page_url = page_url
+
+    def convert_li(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Fix markdownify's erroneous indexing in ol tags."""
+        parent = el.parent
+        if parent is not None and parent.name == "ol":
+            li_tags = parent.find_all("li")
+            bullet = f"{li_tags.index(el)+1}."
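+            # Index only among the ol's li tags; markdownify's own index presumably also
+            # counted whitespace between the tags, which produced wrong numbers.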
+        else:
+            depth = -1
+            while el:
+                if el.name == "ul":
+                    depth += 1
+                el = el.parent
+            bullets = self.options["bullets"]
+            bullet = bullets[depth % len(bullets)]
+        return f"{bullet} {text}\n"
+
+    def convert_hn(self, _n: int, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Convert h tags to bold text with ** instead of adding #."""
+        if convert_as_inline:
+            return text
+        return f"**{text}**\n\n"
+
+    def convert_code(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Undo `markdownify`'s underscore escaping."""
+        return f"`{text}`".replace("\\", "")
+
+    def convert_pre(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Wrap any codeblocks in `py` for syntax highlighting."""
+        code = "".join(el.strings)
+        return f"```py\n{code}```"
+
+    def convert_a(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Resolve relative URLs to `self.page_url`."""
+        el["href"] = urljoin(self.page_url, el["href"])
+        return super().convert_a(el, text, convert_as_inline)
+
+    def convert_p(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Include only one newline instead of two when the parent is a li tag."""
+        if convert_as_inline:
+            return text
+
+        parent = el.parent
+        if parent is not None and parent.name == "li":
+            return f"{text}\n"
+        return super().convert_p(el, text, convert_as_inline)
diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py
new file mode 100644
index 000000000..45a81a4cb
--- /dev/null
+++ b/bot/exts/info/doc/_parsing.py
@@ -0,0 +1,247 @@
+from __future__ import annotations
+
+import logging
+import re
+import string
+import textwrap
+from collections import namedtuple
+from typing import Collection, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union
+
+from bs4 import BeautifulSoup
+from bs4.element import NavigableString, Tag
+
+from bot.utils.helpers import find_nth_occurrence
+from . import MAX_SIGNATURE_AMOUNT
+from ._html import get_dd_description, get_general_description, get_signatures
+from ._markdown import DocMarkdownConverter
+if TYPE_CHECKING:
+    from ._cog import DocItem
+
+log = logging.getLogger(__name__)
+
+_WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)")
+_PARAMETERS_RE = re.compile(r"\((.+)\)")
+
+_NO_SIGNATURE_GROUPS = {
+    "attribute",
+    "envvar",
+    "setting",
+    "templatefilter",
+    "templatetag",
+    "term",
+}
+_EMBED_CODE_BLOCK_LINE_LENGTH = 61
+# MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight
+_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * MAX_SIGNATURE_AMOUNT
+# Maximum discord message length - signatures on top - space for footer
+_MAX_DESCRIPTION_LENGTH = 1900 - _MAX_SIGNATURES_LENGTH
+_TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace
+
+BracketPair = namedtuple("BracketPair", ["opening_bracket", "closing_bracket"])
+_BRACKET_PAIRS = {
+    "{": BracketPair("{", "}"),
+    "(": BracketPair("(", ")"),
+    "[": BracketPair("[", "]"),
+}
+
+
+def _is_closing_quote(search_string: str, index: int) -> bool:
+    """Check whether the quote at `index` inside `search_string` can be a closing quote."""
+    if search_string[index - 1] != "\\":
+        return True
+    elif search_string[index - 2] == "\\":
+        return True
+    return False
+
+
+def _split_parameters(parameters_string: str) -> Iterator[str]:
+    """
+    Split parameters of a signature into individual parameter strings on commas.
+
+    Long string literals are not accounted for.
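+
+    For example, "a, b=(1, 2), c='d,e'" is split into "a", " b=(1, 2)" and " c='d,e'";
+    commas nested inside brackets or quotes are not treated as split points.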
+    """
+    last_split = 0
+    depth = 0
+    current_search: Optional[BracketPair] = None
+    quote_character = None
+
+    enumerated_string = enumerate(parameters_string)
+    for index, character in enumerated_string:
+        if quote_character is None and character in _BRACKET_PAIRS:
+            if current_search is None:
+                current_search = _BRACKET_PAIRS[character]
+                depth = 1
+            elif character == current_search.opening_bracket:
+                depth += 1
+
+        elif character in {"'", '"'}:
+            if current_search is not None:
+                # We're currently searching for a bracket; skip all characters that belong to the string
+                # to avoid false positives of closing brackets.
+                quote_character = character
+                for index, character in enumerated_string:
+                    if character == quote_character and _is_closing_quote(parameters_string, index):
+                        break
+
+            elif depth == 0:
+                depth += 1
+                quote_character = character
+            elif character == quote_character:
+                if _is_closing_quote(parameters_string, index):
+                    depth -= 1
+                    if depth == 0:
+                        quote_character = None
+
+        elif current_search is not None and character == current_search.closing_bracket:
+            depth -= 1
+            if depth == 0:
+                current_search = None
+
+        elif depth == 0 and character == ",":
+            yield parameters_string[last_split:index]
+            last_split = index + 1
+
+    yield parameters_string[last_split:]
+
+
+def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collection[str]]:
+    """
+    Truncate passed signatures to not exceed `_MAX_SIGNATURES_LENGTH`.
+
+    If the signatures need to be truncated, parameters are collapsed until they fit within the limit.
+    Individual signatures can consist of max 1, 2, ..., `MAX_SIGNATURE_AMOUNT` lines of text,
+    inversely proportional to the number of signatures.
+    A maximum of `MAX_SIGNATURE_AMOUNT` signatures is assumed to be passed.
+    """
+    if not sum(len(signature) for signature in signatures) > _MAX_SIGNATURES_LENGTH:
+        return signatures
+
+    max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (MAX_SIGNATURE_AMOUNT + 1 - len(signatures))
+    formatted_signatures = []
+    for signature in signatures:
+        signature = signature.strip()
+        if len(signature) > max_signature_length:
+            if (parameters_match := _PARAMETERS_RE.search(signature)) is None:
+                formatted_signatures.append(textwrap.shorten(signature, max_signature_length))
+                continue
+
+            truncated_signature = []
+            parameters_string = parameters_match[1]
+            running_length = len(signature) - len(parameters_string)
+            for parameter in _split_parameters(parameters_string):
+                if (len(parameter) + running_length) <= max_signature_length - 4:  # account for comma and placeholder
+                    truncated_signature.append(parameter)
+                    running_length += len(parameter) + 1
+                else:
+                    truncated_signature.append(" ...")
+                    formatted_signatures.append(signature.replace(parameters_string, ",".join(truncated_signature)))
+                    break
+        else:
+            formatted_signatures.append(signature)
+
+    return formatted_signatures
+
+
+def _get_truncated_description(
+    elements: Iterable[Union[Tag, NavigableString]],
+    markdown_converter: DocMarkdownConverter,
+    max_length: int,
+    max_lines: int,
+) -> str:
+    """
+    Truncate markdown from `elements` to be at most `max_length` characters when rendered or `max_lines` newlines.
+
+    `max_length` limits the length of the rendered characters in the string,
+    with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits.
+    """
+    result = ""
+    markdown_element_ends = []
+    rendered_length = 0
+
+    tag_end_index = 0
+    for element in elements:
+        is_tag = isinstance(element, Tag)
+        element_length = len(element.text) if is_tag else len(element)
+
+        if rendered_length + element_length < max_length:
+            if is_tag:
+                element_markdown = markdown_converter.process_tag(element, convert_as_inline=False)
+            else:
+                element_markdown = markdown_converter.process_text(element)
+
+            rendered_length += element_length
+            tag_end_index += len(element_markdown)
+
+            if not element_markdown.isspace():
+                markdown_element_ends.append(tag_end_index)
+            result += element_markdown
+        else:
+            break
+
+    if not markdown_element_ends:
+        return ""
+
+    # Determine the "hard" truncation index.
+    newline_truncate_index = find_nth_occurrence(result, "\n", max_lines)
+    if newline_truncate_index is not None and newline_truncate_index < _MAX_DESCRIPTION_LENGTH:
+        # Truncate based on maximum lines if there are more than the maximum number of lines.
+        truncate_index = newline_truncate_index
+    else:
+        # There are fewer than the maximum number of lines; truncate based on the max char length.
+        truncate_index = _MAX_DESCRIPTION_LENGTH
+
+    # Nothing needs to be truncated if the last element ends before the truncation index.
+    if truncate_index >= markdown_element_ends[-1]:
+        return result
+
+    # Determine the actual truncation index.
+    # Truncate at the last Markdown element that comes before the truncation index.
+    markdown_truncate_index = max(cut for cut in markdown_element_ends if cut < truncate_index)
+    return result[:markdown_truncate_index].strip(_TRUNCATE_STRIP_CHARACTERS) + "..."
+
+
+def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str:
+    """
+    Create a markdown string with the signatures at the top, and the converted html description below them.
+
+    The signatures are wrapped in python codeblocks, separated from the description by a newline.
+    The resulting markdown string has at most 750 rendered characters for the description, with the signatures at the start.
+    """
+    description = _get_truncated_description(
+        description,
+        markdown_converter=DocMarkdownConverter(bullets="•", page_url=url),
+        max_length=750,
+        max_lines=13
+    )
+    description = _WHITESPACE_AFTER_NEWLINES_RE.sub('', description)
+    if signatures is not None:
+        formatted_markdown = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures))
+    else:
+        formatted_markdown = ""
+    formatted_markdown += f"\n{description}"
+
+    return formatted_markdown
+
+
+def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]:
+    """
+    Return parsed markdown of the passed symbol using the passed in soup, truncated to fit within a discord message.
+
+    The method of parsing and what information gets included depends on the symbol's group.
+    """
+    symbol_heading = soup.find(id=symbol_data.symbol_id)
+    if symbol_heading is None:
+        return None
+    signature = None
+    # Modules, doc pages and labels don't point to description list tags but to tags like divs;
+    # no special parsing can be done, so we only try to include what's under them.
+    if symbol_data.group in {"module", "doc", "label"} or symbol_heading.name != "dt":
+        description = get_general_description(symbol_heading)
+
+    elif symbol_data.group in _NO_SIGNATURE_GROUPS:
+        description = get_dd_description(symbol_heading)
+
+    else:
+        signature = get_signatures(symbol_heading)
+        description = get_dd_description(symbol_heading)
+    return _create_markdown(signature, description, symbol_data.url).replace('¶', '').strip()
diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py
new file mode 100644
index 000000000..cab51c3f1
--- /dev/null
+++ b/bot/exts/info/doc/_redis_cache.py
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import datetime
+from typing import Optional, TYPE_CHECKING
+
+from async_rediscache.types.base import RedisObject, namespace_lock
+if TYPE_CHECKING:
+    from ._cog import DocItem
+
+
+class DocRedisCache(RedisObject):
+    """Interface for redis functionality needed by the Doc cog."""
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._set_expires = set()
+
+    @namespace_lock
+    async def set(self, item: DocItem, value: str) -> None:
+        """
+        Set the Markdown `value` for the symbol `item`.
+
+        All keys from a single page are stored together, expiring a week after the first set.
+        """
+        url_key = remove_suffix(item.relative_url_path, ".html")
+        redis_key = f"{self.namespace}:{item.package}:{url_key}"
+        needs_expire = False
+
+        with await self._get_pool_connection() as connection:
+            if redis_key not in self._set_expires:
+                self._set_expires.add(redis_key)
+                needs_expire = not await connection.exists(redis_key)
+
+            await connection.hset(redis_key, item.symbol_id, value)
+            if needs_expire:
+                await connection.expire(redis_key, int(datetime.timedelta(weeks=1).total_seconds()))
+
+    @namespace_lock
+    async def get(self, item: DocItem) -> Optional[str]:
+        """Return the Markdown content of the symbol `item` if it exists."""
+        url_key = remove_suffix(item.relative_url_path, ".html")
+
+        with await self._get_pool_connection() as connection:
+            return await connection.hget(f"{self.namespace}:{item.package}:{url_key}", item.symbol_id, encoding="utf8")
+
+    @namespace_lock
+    async def delete(self, package: str) -> bool:
+        """Remove all values for `package`; return True if at least one key was deleted, False otherwise."""
+        with await self._get_pool_connection() as connection:
+            package_keys = [
+                package_key async for package_key in connection.iscan(match=f"{self.namespace}:{package}:*")
+            ]
+            if package_keys:
+                await connection.delete(*package_keys)
+                return True
+            return False
+
+
+def remove_suffix(string: str, suffix: str) -> str:
+    """Remove `suffix` from the end of `string`."""
+    # TODO: replace usages with str.removesuffix on 3.9
+    if string.endswith(suffix):
+        return string[:-len(suffix)]
+    else:
+        return string
diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py
index 7b41352d4..f03b6a46f 100644
--- a/bot/exts/info/source.py
+++ b/bot/exts/info/source.py
@@ -68,7 +68,8 @@ class BotSource(commands.Cog):
         Raise BadArgument if `source_item` is a dynamically-created object (e.g. via internal eval).
""" if isinstance(source_item, commands.Command): - src = source_item.callback.__code__ + source_item = inspect.unwrap(source_item.callback) + src = source_item.__code__ filename = src.co_filename elif isinstance(source_item, str): tags_cog = self.bot.get_cog("Tags") diff --git a/bot/utils/function.py b/bot/utils/function.py index 3ab32fe3c..4fa7a9f60 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -1,14 +1,23 @@ """Utilities for interaction with functions.""" +import functools import inspect +import logging +import types import typing as t +log = logging.getLogger(__name__) + Argument = t.Union[int, str] BoundArgs = t.OrderedDict[str, t.Any] Decorator = t.Callable[[t.Callable], t.Callable] ArgValGetter = t.Callable[[BoundArgs], t.Any] +class GlobalNameConflictError(Exception): + """Raised when there's a conflict between the globals used to resolve annotations of wrapped and its wrapper.""" + + def get_arg_value(name_or_pos: Argument, arguments: BoundArgs) -> t.Any: """ Return a value from `arguments` based on a name or position. @@ -73,3 +82,66 @@ def get_bound_args(func: t.Callable, args: t.Tuple, kwargs: t.Dict[str, t.Any]) bound_args.apply_defaults() return bound_args.arguments + + +def update_wrapper_globals( + wrapper: types.FunctionType, + wrapped: types.FunctionType, + *, + error_on_conflict: bool = True, +) -> types.FunctionType: + """ + Update globals of `wrapper` with the globals from `wrapped`. + + For forwardrefs in command annotations discordpy uses the __global__ attribute of the function + to resolve their values, with decorators that replace the function this breaks because they have + their own globals. + + This function creates a new function functionally identical to `wrapper`, which has the globals replaced with + a merge of `wrapped`s globals and the `wrapper`s globals. + + If `error_on_conflict` is True, an exception will be raised in case `wrapper` and `wrapped` share a global name + that is used by `wrapped`'s typehints, as this can cause incorrect objects being used by discordpy's converters. + The error can be turned into a warning by setting the argument to False. + """ + forwardrefs = (ann for ann in wrapped.__annotations__.values() if isinstance(ann, str)) + annotation_global_names = (ann.split(".", maxsplit=1)[0] for ann in forwardrefs) + # Conflicting globals from both functions' modules that are also used in the wrapper and in wrapped's annotations. + shared_globals = set(wrapper.__code__.co_names) & set(annotation_global_names) + shared_globals &= set(wrapped.__globals__) & set(wrapper.__globals__) + if shared_globals: + message = ( + f"wrapper and the wrapped function share the following " + f"global names used by annotations: {', '.join(shared_globals)}. " + f"Resolve the conflicts or pass error_on_conflict=False to suppress this error if this is intentional." 
+        )
+        if error_on_conflict:
+            raise GlobalNameConflictError(message)
+        else:
+            log.info(message)
+
+    new_globals = wrapper.__globals__.copy()
+    new_globals.update((k, v) for k, v in wrapped.__globals__.items() if k not in wrapper.__code__.co_names)
+    return types.FunctionType(
+        code=wrapper.__code__,
+        globals=new_globals,
+        name=wrapper.__name__,
+        argdefs=wrapper.__defaults__,
+        closure=wrapper.__closure__,
+    )
+
+
+def command_wraps(
+    wrapped: types.FunctionType,
+    assigned: t.Sequence[str] = functools.WRAPPER_ASSIGNMENTS,
+    updated: t.Sequence[str] = functools.WRAPPER_UPDATES,
+    *,
+    error_on_conflict: bool = True,
+) -> t.Callable[[types.FunctionType], types.FunctionType]:
+    """Update the decorated function to look like `wrapped` and update globals for discordpy forwardref evaluation."""
+    def decorator(wrapper: types.FunctionType) -> types.FunctionType:
+        return functools.update_wrapper(
+            update_wrapper_globals(wrapper, wrapped, error_on_conflict=error_on_conflict), wrapped, assigned, updated
+        )
+
+    return decorator
diff --git a/bot/utils/lock.py b/bot/utils/lock.py
index e44776340..b4c93f063 100644
--- a/bot/utils/lock.py
+++ b/bot/utils/lock.py
@@ -1,13 +1,15 @@
 import asyncio
 import inspect
 import logging
+import types
 from collections import defaultdict
-from functools import partial, wraps
+from functools import partial
 from typing import Any, Awaitable, Callable, Hashable, Union
 from weakref import WeakValueDictionary
 
 from bot.errors import LockedResourceError
 from bot.utils import function
+from bot.utils.function import command_wraps
 
 log = logging.getLogger(__name__)
 __lock_dicts = defaultdict(WeakValueDictionary)
@@ -17,6 +19,35 @@ _IdCallable = Callable[[function.BoundArgs], _IdCallableReturn]
 ResourceId = Union[Hashable, _IdCallable]
 
 
+class SharedEvent:
+    """
+    Context manager managing an internal event exposed through the wait coro.
+
+    While any code is executing in this context manager, the underlying event will not be set;
+    when all of the holders finish the event will be set.
+    """
+
+    def __init__(self):
+        self._active_count = 0
+        self._event = asyncio.Event()
+        self._event.set()
+
+    def __enter__(self):
+        """Increment the count of the active holders and clear the internal event."""
+        self._active_count += 1
+        self._event.clear()
+
+    def __exit__(self, _exc_type, _exc_val, _exc_tb):  # noqa: ANN001
+        """Decrement the count of the active holders; if 0 is reached set the internal event."""
+        self._active_count -= 1
+        if not self._active_count:
+            self._event.set()
+
+    async def wait(self) -> None:
+        """Wait for all active holders to exit."""
+        await self._event.wait()
+
+
 def lock(
     namespace: Hashable,
     resource_id: ResourceId,
@@ -41,10 +72,10 @@ def lock(
     If decorating a command, this decorator must go before (below) the `command` decorator.
""" - def decorator(func: Callable) -> Callable: + def decorator(func: types.FunctionType) -> types.FunctionType: name = func.__name__ - @wraps(func) + @command_wraps(func) async def wrapper(*args, **kwargs) -> Any: log.trace(f"{name}: mutually exclusive decorator called") diff --git a/bot/utils/messages.py b/bot/utils/messages.py index 42bde358d..c42e4bacc 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -135,14 +135,14 @@ def sub_clyde(username: Optional[str]) -> Optional[str]: return username # Empty string or None -async def send_denial(ctx: Context, reason: str) -> None: +async def send_denial(ctx: Context, reason: str) -> discord.Message: """Send an embed denying the user with the given reason.""" embed = discord.Embed() embed.colour = discord.Colour.red() embed.title = random.choice(NEGATIVE_REPLIES) embed.description = reason - await ctx.send(embed=embed) + return await ctx.send(embed=embed) def format_user(user: discord.abc.User) -> str: diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py index c42111f3f..231798a92 100644 --- a/tests/bot/test_converters.py +++ b/tests/bot/test_converters.py @@ -10,9 +10,9 @@ from bot.converters import ( Duration, HushDurationConverter, ISODateTime, + PackageName, TagContentConverter, TagNameConverter, - ValidPythonIdentifier, ) @@ -78,24 +78,23 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase): with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): await TagNameConverter.convert(self.context, invalid_name) - async def test_valid_python_identifier_for_valid(self): - """ValidPythonIdentifier returns valid identifiers unchanged.""" - test_values = ('foo', 'lemon') + async def test_package_name_for_valid(self): + """PackageName returns valid package names unchanged.""" + test_values = ('foo', 'le_mon') for name in test_values: with self.subTest(identifier=name): - conversion = await ValidPythonIdentifier.convert(self.context, name) + conversion = await PackageName.convert(self.context, name) self.assertEqual(name, conversion) - async def test_valid_python_identifier_for_invalid(self): - """ValidPythonIdentifier raises the proper exception for invalid identifiers.""" - test_values = ('nested.stuff', '#####') + async def test_package_name_for_invalid(self): + """PackageName raises the proper exception for invalid package names.""" + test_values = ('text_with_a_dot.', 'UpperCaseName', "num83r") for name in test_values: with self.subTest(identifier=name): - exception_message = f'`{name}` is not a valid Python identifier' - with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): - await ValidPythonIdentifier.convert(self.context, name) + with self.assertRaises(BadArgument): + await PackageName.convert(self.context, name) async def test_duration_converter_for_valid(self): """Duration returns the correct `datetime` for valid duration strings.""" |