From c8abfbbe81a300ce7eb77f7f64d2bde753b2d852 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:41 -0500 Subject: [PATCH 01/20] Update uv.lock --- uv.lock | 158 +++++++++++++++++++++++++++++--------------------------- 1 file changed, 82 insertions(+), 76 deletions(-) diff --git a/uv.lock b/uv.lock index a5a5c91febc..dd5c7b7eb56 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.11'", @@ -47,85 +47,88 @@ wheels = [ [[package]] name = "beautifulsoup4" -version = "4.13.4" +version = "4.13.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "soupsieve" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" } +sdist = { url = "https://files.pythonhosted.org/packages/85/2e/3e5079847e653b1f6dc647aa24549d68c6addb4c595cc0d902d1b19308ad/beautifulsoup4-4.13.5.tar.gz", hash = "sha256:5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695", size = 622954, upload-time = "2025-08-24T14:06:13.168Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" }, + { url = "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", size = 105113, upload-time = "2025-08-24T14:06:14.884Z" }, ] [[package]] name = "certifi" -version 
= "2025.7.14" +version = "2025.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" }, + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, ] [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = 
"2025-05-02T08:31:46.725Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, - { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, - { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, - { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, - { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, - { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, 
upload-time = "2025-05-02T08:32:10.46Z" }, - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", 
size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, + { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, + { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, + { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, + { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 
204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, ] [[package]] @@ -525,7 +528,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.4" +version = "2.32.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -533,9 +536,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] @@ -582,11 +585,11 @@ wheels = [ [[package]] name = "soupsieve" -version = "2.7" +version = "2.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, ] [[package]] @@ -772,11 +775,11 @@ wheels = [ [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] @@ -802,9 +805,12 @@ wheels = [ [[package]] name = "yamlloader" -version = "1.5.1" +version = "1.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6b/7f094b4d9009794cb69dea18c99bdee4e62b4e0ee69bb19128178191f18a/yamlloader-1.5.1.tar.gz", hash = 
"sha256:8dece19b050acb1c6a8ca14aa30793388f9be154f734b826541f9a1828d41cec", size = 77157, upload-time = "2025-01-01T17:31:36.453Z" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/ab/a0cdfe8081e0fb9dd15372f8ccb257541d197450f6047a366fef597bb1bf/yamlloader-1.5.2.tar.gz", hash = "sha256:c10ac1321a626b1ca8b09d3f3afe9855b818391c59992a76b65e4a2d95eac41b", size = 77246, upload-time = "2025-08-05T13:23:47.62Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/0f/28323a9c418403cd4080d1736873c354bf058ad34306be5ff58639dcaedd/yamlloader-1.5.1-py3-none-any.whl", hash = "sha256:610014b14e25d7328d69f6526524d4616a552561f4c1b919f1282695bc1f4a11", size = 7684, upload-time = "2025-01-01T17:31:33.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a3/bdf1a1dd0ddb658332dbe53194624c668c7935b5c60b2d36dae4018053ec/yamlloader-1.5.2-py3-none-any.whl", hash = "sha256:5855d320d55d55f4309f69798b7cd6f55f70739a0f3d4bbe78aeb42c204c2d81", size = 7774, upload-time = "2025-08-05T13:23:46.455Z" }, ] From c8b9d384e11e71e094c8fbc9503f56f9acf0afeb Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:41 -0500 Subject: [PATCH 02/20] Add ruff and shfmt to pyproject.toml --- pyproject.toml | 13 +++++++++++++ uv.lock | 40 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 72f42c753d8..d9392ebbc73 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,11 @@ format = [ "clang-format~=20.1.0", ] +format-scripts = [ + "ruff>=0.13.0", + "shfmt-py>=3.12.0.2", +] + docs = [ # .evergreen/scripts/build-docs.sh "furo>=2023.5.20", @@ -45,3 +50,11 @@ mc-evg-generate = "config_generator.generate:main" packages = [ ".evergreen/config_generator", ] + +[tool.ruff] +line-length = 120 +src = [".evergreen", "etc"] + +[tool.ruff.format] +quote-style = "single" +indent-style = "space" diff --git a/uv.lock b/uv.lock index dd5c7b7eb56..0695f610b90 
100644 --- a/uv.lock +++ b/uv.lock @@ -323,6 +323,10 @@ evg = [ format = [ { name = "clang-format" }, ] +format-scripts = [ + { name = "ruff" }, + { name = "shfmt-py" }, +] [package.metadata] @@ -349,6 +353,10 @@ evg = [ { name = "yamlloader", specifier = ">=1.5" }, ] format = [{ name = "clang-format", specifier = "~=20.1.0" }] +format-scripts = [ + { name = "ruff", specifier = ">=0.13.0" }, + { name = "shfmt-py", specifier = ">=3.12.0.2" }, +] [[package]] name = "packaging" @@ -550,6 +558,38 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, ] +[[package]] +name = "ruff" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" }, + { url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" }, + { url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" }, + { url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" }, + { url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" }, + { url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" }, + { url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" }, + { url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" }, + { url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" }, + { url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" }, + { url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = 
"2025-09-10T16:25:25.681Z" }, + { url = "https://files.pythonhosted.org/packages/46/09/dca8df3d48e8b3f4202bf20b1658898e74b6442ac835bfe2c1816d926697/ruff-0.13.0-py3-none-win32.whl", hash = "sha256:4e473e8f0e6a04e4113f2e1de12a5039579892329ecc49958424e5568ef4f768", size = 12141613, upload-time = "2025-09-10T16:25:28.664Z" }, + { url = "https://files.pythonhosted.org/packages/61/21/0647eb71ed99b888ad50e44d8ec65d7148babc0e242d531a499a0bbcda5f/ruff-0.13.0-py3-none-win_amd64.whl", hash = "sha256:48e5c25c7a3713eea9ce755995767f4dcd1b0b9599b638b12946e892123d1efb", size = 13258250, upload-time = "2025-09-10T16:25:31.773Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a3/03216a6a86c706df54422612981fb0f9041dbb452c3401501d4a22b942c9/ruff-0.13.0-py3-none-win_arm64.whl", hash = "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e", size = 12312357, upload-time = "2025-09-10T16:25:35.595Z" }, +] + +[[package]] +name = "shfmt-py" +version = "3.12.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/d9/a1ea26749bd19467e9fbfe7b34e6c8df517e01de4028a45b954eebe8c03b/shfmt_py-3.12.0.2.tar.gz", hash = "sha256:6a0dc675b37d000eb236609cf15aedd9e7a538927ea02c57b617908b6f237e9c", size = 4467, upload-time = "2025-07-08T06:54:40.396Z" } + [[package]] name = "shrub-py" version = "3.10.0" From ffd2fb3d13c9e75e4aa96d4b33bff7fa493a162e Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:41 -0500 Subject: [PATCH 03/20] Add tools/ruff-format-all.sh --- tools/ruff-format-all.sh | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100755 tools/ruff-format-all.sh diff --git a/tools/ruff-format-all.sh b/tools/ruff-format-all.sh new file mode 100755 index 00000000000..94dc9e133b3 --- /dev/null +++ b/tools/ruff-format-all.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +# +# format.sh +# +# Usage: +# uv run --frozen etc/ruff-format-all.sh +# +# This script is meant to be run from the 
project root directory. + +set -o errexit +set -o pipefail + +# Python scripts. +# https://github.com/astral-sh/ruff/issues/8232 +uv run --frozen --group format-scripts ruff check --select I --fix +uv run --frozen --group format-scripts ruff format From 2b54eaaf5635aaff86e59f70002e75812a053453 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:41 -0500 Subject: [PATCH 04/20] Add tools/shfmt-format-all.sh --- tools/shfmt-format-all.sh | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100755 tools/shfmt-format-all.sh diff --git a/tools/shfmt-format-all.sh b/tools/shfmt-format-all.sh new file mode 100755 index 00000000000..a730f5b127e --- /dev/null +++ b/tools/shfmt-format-all.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# +# format.sh +# +# Usage: +# uv run --frozen etc/shfmt-format-all.sh +# +# This script is meant to be run from the project root directory. + +set -o errexit +set -o pipefail + +script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +root_dir="$(cd "${script_dir:?}/.." 
&& pwd)" + +command -v find >/dev/null + +include=( + "${root_dir:?}/.evergreen" + "${root_dir:?}/src" + "${root_dir:?}/tools" +) + +exclude=( + "${root_dir:?}/.evergreen/scripts/uv-installer.sh" +) + +mapfile -t files < <(find "${include[@]:?}" -name '*.sh' -type f | grep -v "${exclude[@]:?}") + +for file in "${files[@]:?}"; do + uv run --frozen --group format-scripts shfmt -i 2 -w "${file:?}" +done From 5cca39d0dc801c99e5170133f158bccbfbf4db12 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:41 -0500 Subject: [PATCH 05/20] Add exception for Python >=3.12 requirement --- tools/ruff-format-all.sh | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tools/ruff-format-all.sh b/tools/ruff-format-all.sh index 94dc9e133b3..4ea8b2621cd 100755 --- a/tools/ruff-format-all.sh +++ b/tools/ruff-format-all.sh @@ -10,7 +10,14 @@ set -o errexit set -o pipefail +# Scripts which require a different Python version than the one specified in pyproject.toml. +# See: https://github.com/astral-sh/ruff/issues/10457 +py312=( + src/libbson/tests/validate-tests.py +) + # Python scripts. 
# https://github.com/astral-sh/ruff/issues/8232 -uv run --frozen --group format-scripts ruff check --select I --fix +uv run --frozen --group format-scripts ruff check --select I --fix --exclude "${py312[@]:?}" +uv run --frozen --group format-scripts --isolated ruff check --select I --fix --target-version py312 "${py312[@]:?}" uv run --frozen --group format-scripts ruff format From 7d666dd98a2bb02eb47cba3100c9e5c74da957bd Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:41 -0500 Subject: [PATCH 06/20] Format all script files --- .../components/abi_compliance_check.py | 8 +- .../components/c_std_compile.py | 11 +- .../components/clang_format.py | 16 +- .../components/cse/darwinssl.py | 6 +- .../components/cse/openssl.py | 12 +- .../config_generator/components/cse/winssl.py | 6 +- .../config_generator/components/earthly.py | 168 +- .../funcs/bootstrap_mongo_orchestration.py | 8 +- .../components/funcs/fetch_build.py | 8 +- .../components/funcs/fetch_det.py | 15 +- .../components/funcs/fetch_source.py | 15 +- .../components/funcs/find_cmake_latest.py | 6 +- .../funcs/restore_instance_profile.py | 4 +- .../components/funcs/run_mock_kms_servers.py | 8 +- .../funcs/run_simple_http_server.py | 4 +- .../components/funcs/set_cache_dir.py | 13 +- .../components/funcs/stop_load_balancer.py | 4 +- .../funcs/stop_mongo_orchestration.py | 4 +- .../components/funcs/upload_build.py | 4 +- .../components/funcs/upload_mo_artifacts.py | 11 +- .../components/funcs/upload_test_results.py | 8 +- .../components/kms_divergence_check.py | 9 +- .../components/loadbalanced.py | 77 +- .../config_generator/components/make_docs.py | 101 +- .../components/mock_server.py | 14 +- .../components/openssl_compat.py | 17 +- .../components/sanitizers/asan.py | 1 - .../components/sanitizers/asan_cse.py | 16 +- .../components/sanitizers/asan_sasl.py | 12 +- .../components/sanitizers/tsan.py | 1 - .../components/sanitizers/tsan_sasl.py | 14 +- .../components/sasl/darwinssl.py | 4 +- 
.../config_generator/components/sasl/nossl.py | 4 +- .../components/sasl/openssl.py | 12 +- .../components/sasl/winssl.py | 4 +- .../config_generator/components/sbom.py | 26 +- .../config_generator/components/scan_build.py | 9 +- .evergreen/config_generator/etc/compile.py | 7 +- .../config_generator/etc/cse/compile.py | 8 +- .evergreen/config_generator/etc/cse/test.py | 10 +- .evergreen/config_generator/etc/distros.py | 31 +- .evergreen/config_generator/etc/function.py | 6 +- .../config_generator/etc/sanitizers/test.py | 20 +- .../config_generator/etc/sasl/compile.py | 6 +- .evergreen/config_generator/etc/sasl/test.py | 10 +- .evergreen/config_generator/etc/utils.py | 25 +- .evergreen/config_generator/generate.py | 20 +- .../evergreen_config_generator/__init__.py | 34 +- .../evergreen_config_generator/functions.py | 58 +- .../evergreen_config_generator/taskgroups.py | 21 +- .../evergreen_config_generator/tasks.py | 45 +- .../evergreen_config_generator/variants.py | 3 +- .../evergreen_config_lib/__init__.py | 3 +- .../evergreen_config_lib/functions.py | 490 ++-- .../evergreen_config_lib/taskgroups.py | 1 + .../evergreen_config_lib/tasks.py | 676 ++--- .../evergreen_config_lib/testazurekms.py | 51 +- .../evergreen_config_lib/testgcpkms.py | 34 +- .../evergreen_config_lib/variants.py | 451 ++-- .../generate-evergreen-config.py | 22 +- .../ocsp/ecdsa/mock-delegate-revoked.sh | 6 +- .evergreen/ocsp/ecdsa/mock-delegate-valid.sh | 4 +- .evergreen/ocsp/ecdsa/mock-revoked.sh | 7 +- .evergreen/ocsp/ecdsa/mock-valid.sh | 4 +- .evergreen/ocsp/ecdsa/rename.sh | 2 +- .evergreen/ocsp/mock_ocsp_responder.py | 261 +- .evergreen/ocsp/ocsp_mock.py | 38 +- .evergreen/ocsp/rsa/mock-delegate-revoked.sh | 6 +- .evergreen/ocsp/rsa/mock-delegate-valid.sh | 4 +- .evergreen/ocsp/rsa/mock-revoked.sh | 6 +- .evergreen/ocsp/rsa/mock-valid.sh | 4 +- .evergreen/scripts/build-docs.sh | 2 +- .evergreen/scripts/build_snapshot_rpm.sh | 16 +- .evergreen/scripts/check-preludes.py | 62 +- 
.evergreen/scripts/check-symlink.sh | 3 +- .evergreen/scripts/check_rpm_spec.sh | 12 +- .evergreen/scripts/compile-libmongocrypt.sh | 9 +- .evergreen/scripts/compile-windows.sh | 2 +- .evergreen/scripts/debian_package_build.sh | 28 +- .evergreen/scripts/debug-core-evergreen.sh | 14 +- .evergreen/scripts/integration-tests.sh | 38 +- .evergreen/scripts/kms-divergence-check.sh | 14 +- .../scripts/link-sample-program-bson.sh | 9 +- .evergreen/scripts/link-sample-program.sh | 14 +- .evergreen/scripts/man-pages-to-html.sh | 2 +- .evergreen/scripts/openssl-compat-setup.sh | 12 +- .evergreen/scripts/run-auth-tests.sh | 36 +- .evergreen/scripts/run-aws-tests.sh | 16 +- .evergreen/scripts/run-ocsp-test.sh | 16 +- .evergreen/scripts/run-tests.sh | 4 +- .evergreen/scripts/simple_http_server.py | 9 +- build/bottle.py | 2343 +++++++++-------- build/fake_kms_provider_server.py | 66 +- build/generate-future-functions.py | 973 +++---- build/generate-opts.py | 5 +- build/mongodl.py | 314 +-- build/proc-ctl.py | 127 +- build/sphinx/homepage-config/conf.py | 28 +- build/sphinx/mongoc/__init__.py | 8 +- build/sphinx/mongoc_common.py | 73 +- docs/dev/conf.py | 68 +- lldb.pyi | 2 +- lldb_bson.py | 362 +-- src/libbson/doc/conf.py | 26 +- .../compile-with-pkg-config-static.sh | 2 +- .../examples/compile-with-pkg-config.sh | 2 +- src/libbson/fuzz/make-dicts.py | 124 +- src/libbson/tests/validate-tests.py | 788 +++--- src/libmongoc/doc/cmakerefdomain.py | 48 +- src/libmongoc/doc/conf.py | 107 +- src/libmongoc/examples/parse_handshake_cfg.py | 88 +- tools/base.sh | 45 +- tools/download.sh | 136 +- tools/earthly.sh | 32 +- tools/format.py | 88 +- tools/paths.sh | 190 +- tools/platform.sh | 199 +- tools/use.sh | 76 +- 118 files changed, 4938 insertions(+), 4734 deletions(-) diff --git a/.evergreen/config_generator/components/abi_compliance_check.py b/.evergreen/config_generator/components/abi_compliance_check.py index 1ea3ac0d3c1..3d541e53f2c 100644 --- 
a/.evergreen/config_generator/components/abi_compliance_check.py +++ b/.evergreen/config_generator/components/abi_compliance_check.py @@ -1,9 +1,7 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_put +from shrub.v3.evg_command import EvgCommandType, s3_put from shrub.v3.evg_task import EvgTask from config_generator.components.funcs.set_cache_dir import SetCacheDir - from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -15,14 +13,14 @@ class CheckABICompliance(Function): command_type=EvgCommandType.SETUP, working_dir='mongoc', include_expansions_in_env=['MONGO_C_DRIVER_CACHE_DIR'], - script='.evergreen/scripts/abi-compliance-check-setup.sh' + script='.evergreen/scripts/abi-compliance-check-setup.sh', ), bash_exec( command_type=EvgCommandType.TEST, add_expansions_to_env=True, working_dir='mongoc', include_expansions_in_env=['MONGO_C_DRIVER_CACHE_DIR'], - script='.evergreen/scripts/abi-compliance-check.sh' + script='.evergreen/scripts/abi-compliance-check.sh', ), s3_put( command_type=EvgCommandType.SYSTEM, diff --git a/.evergreen/config_generator/components/c_std_compile.py b/.evergreen/config_generator/components/c_std_compile.py index 9060cf54458..03f7b64bc82 100644 --- a/.evergreen/config_generator/components/c_std_compile.py +++ b/.evergreen/config_generator/components/c_std_compile.py @@ -3,14 +3,10 @@ from shrub.v3.evg_task import EvgTask, EvgTaskRef from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest - -from config_generator.etc.distros import find_large_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars +from config_generator.etc.distros import compiler_to_vars, find_large_distro, make_distro_str from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec - TAG = 'std-matrix' @@ -95,10 +91,7 @@ def tasks(): name=task_name, 
run_on=distro.name, tags=tags + [f'std-c{std}'], - commands=[ - FindCMakeLatest.call(), - StdCompile.call(vars=compile_vars | with_std) - ], + commands=[FindCMakeLatest.call(), StdCompile.call(vars=compile_vars | with_std)], ) ) diff --git a/.evergreen/config_generator/components/clang_format.py b/.evergreen/config_generator/components/clang_format.py index 821b1a1efcf..049477fe685 100644 --- a/.evergreen/config_generator/components/clang_format.py +++ b/.evergreen/config_generator/components/clang_format.py @@ -1,14 +1,12 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_task import EvgTask -from shrub.v3.evg_task import EvgTaskRef +from shrub.v3.evg_task import EvgTask, EvgTaskRef from config_generator.etc.distros import find_small_distro from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec - -TAG = "clang-format" +TAG = 'clang-format' class ClangFormat(Function): @@ -16,11 +14,11 @@ class ClangFormat(Function): commands = [ bash_exec( command_type=EvgCommandType.TEST, - working_dir="mongoc", + working_dir='mongoc', env={ - "DRYRUN": "1", + 'DRYRUN': '1', }, - script="uv run --frozen --only-group=format tools/format.py --mode=check", + script='uv run --frozen --only-group=format tools/format.py --mode=check', ), ] @@ -42,7 +40,7 @@ def variants(): BuildVariant( name=TAG, display_name=TAG, - run_on=[find_small_distro("ubuntu2204").name], - tasks=[EvgTaskRef(name=f".{TAG}")], + run_on=[find_small_distro('ubuntu2204').name], + tasks=[EvgTaskRef(name=f'.{TAG}')], ), ] diff --git a/.evergreen/config_generator/components/cse/darwinssl.py b/.evergreen/config_generator/components/cse/darwinssl.py index a3819b4e858..c593e942eb5 100644 --- a/.evergreen/config_generator/components/cse/darwinssl.py +++ b/.evergreen/config_generator/components/cse/darwinssl.py @@ -2,11 +2,9 @@ from shrub.v3.evg_task import EvgTaskRef from config_generator.etc.compile import 
generate_compile_tasks - from config_generator.etc.cse.compile import CompileCommon from config_generator.etc.cse.test import generate_test_tasks - SSL = 'darwinssl' TAG = f'cse-matrix-{SSL}' @@ -52,9 +50,7 @@ def tasks(): MORE_TAGS = ['cse'] - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += generate_test_tasks(SSL, TAG, TEST_MATRIX) diff --git a/.evergreen/config_generator/components/cse/openssl.py b/.evergreen/config_generator/components/cse/openssl.py index 8dbeb8176c9..8e33bd6b793 100644 --- a/.evergreen/config_generator/components/cse/openssl.py +++ b/.evergreen/config_generator/components/cse/openssl.py @@ -1,12 +1,10 @@ from shrub.v3.evg_build_variant import BuildVariant from config_generator.etc.compile import generate_compile_tasks -from config_generator.etc.function import merge_defns -from config_generator.etc.utils import TaskRef - from config_generator.etc.cse.compile import CompileCommon from config_generator.etc.cse.test import generate_test_tasks - +from config_generator.etc.function import merge_defns +from config_generator.etc.utils import TaskRef SSL = 'openssl' TAG = f'cse-matrix-{SSL}' @@ -83,7 +81,7 @@ def tasks(): # PowerPC and zSeries are limited resources. for task in res: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): task.patchable = False return res @@ -98,11 +96,11 @@ def variants(): # PowerPC and zSeries are limited resources. 
for task in TASKS: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): tasks.append( TaskRef( name=task.name, - batchtime=1440, # 1 day + batchtime=1440, # 1 day ) ) else: diff --git a/.evergreen/config_generator/components/cse/winssl.py b/.evergreen/config_generator/components/cse/winssl.py index f9281975260..995a268720a 100644 --- a/.evergreen/config_generator/components/cse/winssl.py +++ b/.evergreen/config_generator/components/cse/winssl.py @@ -2,11 +2,9 @@ from shrub.v3.evg_task import EvgTaskRef from config_generator.etc.compile import generate_compile_tasks - from config_generator.etc.cse.compile import CompileCommon from config_generator.etc.cse.test import generate_test_tasks - SSL = 'winssl' TAG = f'cse-matrix-{SSL}' @@ -50,9 +48,7 @@ def tasks(): MORE_TAGS = ['cse'] - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += generate_test_tasks(SSL, TAG, TEST_MATRIX) diff --git a/.evergreen/config_generator/components/earthly.py b/.evergreen/config_generator/components/earthly.py index 683be59728c..f38aba8cf74 100644 --- a/.evergreen/config_generator/components/earthly.py +++ b/.evergreen/config_generator/components/earthly.py @@ -19,62 +19,62 @@ from ..etc.utils import all_possible -T = TypeVar("T") +T = TypeVar('T') -_ENV_PARAM_NAME = "MONGOC_EARTHLY_ENV" -_CC_PARAM_NAME = "MONGOC_EARTHLY_C_COMPILER" -"The name of the EVG expansion for the Earthly c_compiler argument" +_ENV_PARAM_NAME = 'MONGOC_EARTHLY_ENV' +_CC_PARAM_NAME = 'MONGOC_EARTHLY_C_COMPILER' +'The name of the EVG expansion for the Earthly c_compiler argument' EnvKey = Literal[ - "u20", - "u22", - "almalinux8", - "almalinux9", - "almalinux10", - "alpine3.19", - "alpine3.20", - "alpine3.21", - "alpine3.22", - "archlinux", - "centos9", - "centos10", + 'u20', + 'u22', + 'almalinux8', + 
'almalinux9', + 'almalinux10', + 'alpine3.19', + 'alpine3.20', + 'alpine3.21', + 'alpine3.22', + 'archlinux', + 'centos9', + 'centos10', ] "Identifiers for environments. These correspond to special 'env.*' targets in the Earthfile." -CompilerName = Literal["gcc", "clang"] -"The name of the compiler program that is used for the build. Passed via --c_compiler to Earthly." +CompilerName = Literal['gcc', 'clang'] +'The name of the compiler program that is used for the build. Passed via --c_compiler to Earthly.' # Other options: SSPI (Windows only), AUTO (not reliably test-able without more environments) -SASLOption = Literal["Cyrus", "off"] -"Valid options for the SASL configuration parameter" -TLSOption = Literal["OpenSSL", "off"] +SASLOption = Literal['Cyrus', 'off'] +'Valid options for the SASL configuration parameter' +TLSOption = Literal['OpenSSL', 'off'] "Options for the TLS backend configuration parameter (AKA 'ENABLE_SSL')" -CxxVersion = Literal["master", "r4.1.0", "none"] -"C++ driver refs that are under CI test" +CxxVersion = Literal['master', 'r4.1.0', 'none'] +'C++ driver refs that are under CI test' # A separator character, since we cannot use whitespace -_SEPARATOR = "\N{NO-BREAK SPACE}\N{BULLET}\N{NO-BREAK SPACE}" +_SEPARATOR = '\N{NO-BREAK SPACE}\N{BULLET}\N{NO-BREAK SPACE}' def os_split(env: EnvKey) -> tuple[str, None | str]: """Convert the environment key into a pretty name+version pair""" match env: # Match 'alpine3.18' 'alpine53.123' etc. - case alp if mat := re.match(r"alpine(\d+\.\d+)", alp): - return ("Alpine", mat[1]) - case "archlinux": - return "ArchLinux", None + case alp if mat := re.match(r'alpine(\d+\.\d+)', alp): + return ('Alpine', mat[1]) + case 'archlinux': + return 'ArchLinux', None # Match 'u22', 'u20', 'u71' etc. - case ubu if mat := re.match(r"u(\d\d)", ubu): - return "Ubuntu", f"{mat[1]}.04" + case ubu if mat := re.match(r'u(\d\d)', ubu): + return 'Ubuntu', f'{mat[1]}.04' # Match 'centos9', 'centos10', etc. 
- case cent if mat := re.match(r"centos(\d+)", cent): - return "CentOS", f"{mat[1]}" + case cent if mat := re.match(r'centos(\d+)', cent): + return 'CentOS', f'{mat[1]}' # Match 'almalinux8', 'almalinux10', etc. - case alm if mat := re.match(r"almalinux(\d+)", alm): - return "AlmaLinux", f"{mat[1]}" + case alm if mat := re.match(r'almalinux(\d+)', alm): + return 'AlmaLinux', f'{mat[1]}' case _: - raise ValueError(f"Failed to split OS env key {env=} into a name+version pair (unrecognized)") + raise ValueError(f'Failed to split OS env key {env=} into a name+version pair (unrecognized)') class EarthlyVariant(NamedTuple): @@ -95,14 +95,14 @@ def display_name(self) -> str: case name, None: base = name case name, version: - base = f"{name} {version}" + base = f'{name} {version}' toolchain: str match self.c_compiler: - case "clang": - toolchain = "LLVM/Clang" - case "gcc": - toolchain = "GCC" - return f"{base} ({toolchain})" + case 'clang': + toolchain = 'LLVM/Clang' + case 'gcc': + toolchain = 'GCC' + return f'{base} ({toolchain})' @property def task_selector_tag(self) -> str: @@ -110,7 +110,7 @@ def task_selector_tag(self) -> str: The task tag that is used to select the tasks that want to run on this variant. 
""" - return f"{self.env}-{self.c_compiler}" + return f'{self.env}-{self.c_compiler}' @property def expansions(self) -> Mapping[str, str]: @@ -125,8 +125,8 @@ def expansions(self) -> Mapping[str, str]: def as_evg_variant(self) -> BuildVariant: return BuildVariant( - name=f"{self.task_selector_tag}", - tasks=[EvgTaskRef(name=f".{self.task_selector_tag}")], + name=f'{self.task_selector_tag}', + tasks=[EvgTaskRef(name=f'.{self.task_selector_tag}')], display_name=self.display_name, expansions=dict(self.expansions), ) @@ -148,28 +148,28 @@ class Configuration(NamedTuple): @property def suffix(self) -> str: - return _SEPARATOR.join(f"{k}={v}" for k, v in self._asdict().items()) + return _SEPARATOR.join(f'{k}={v}' for k, v in self._asdict().items()) # Authenticate with DevProd-provided Amazon ECR instance to use as pull-through cache for DockerHub. class DockerLoginAmazonECR(Function): - name = "docker-login-amazon-ecr" + name = 'docker-login-amazon-ecr' commands = [ # Avoid inadvertently using a pre-existing and potentially conflicting Docker config. 
- expansions_update(updates=[KeyValueParam(key="DOCKER_CONFIG", value="${workdir}/.docker")]), - ec2_assume_role(role_arn="arn:aws:iam::901841024863:role/ecr-role-evergreen-ro"), + expansions_update(updates=[KeyValueParam(key='DOCKER_CONFIG', value='${workdir}/.docker')]), + ec2_assume_role(role_arn='arn:aws:iam::901841024863:role/ecr-role-evergreen-ro'), subprocess_exec( - binary="bash", + binary='bash', command_type=EvgCommandType.SETUP, include_expansions_in_env=[ - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - "AWS_SESSION_TOKEN", - "DOCKER_CONFIG", + 'AWS_ACCESS_KEY_ID', + 'AWS_SECRET_ACCESS_KEY', + 'AWS_SESSION_TOKEN', + 'DOCKER_CONFIG', ], args=[ - "-c", - "aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin 901841024863.dkr.ecr.us-east-1.amazonaws.com", + '-c', + 'aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin 901841024863.dkr.ecr.us-east-1.amazonaws.com', ], ), ] @@ -195,7 +195,7 @@ def variants_for(config: Configuration) -> Iterable[EarthlyVariant]: def earthly_exec( *, - kind: Literal["test", "setup", "system"], + kind: Literal['test', 'setup', 'system'], target: str, secrets: Mapping[str, str] | None = None, args: Mapping[str, str] | None = None, @@ -203,20 +203,20 @@ def earthly_exec( """Create a subprocess_exec command that runs Earthly with the given arguments""" env: dict[str, str] = {k: v for k, v in (secrets or {}).items()} return subprocess_exec( - "./tools/earthly.sh", + './tools/earthly.sh', args=[ # Use Amazon ECR as pull-through cache for DockerHub to avoid rate limits. 
- "--buildkit-image=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub/earthly/buildkitd:v0.8.3", - *(f"--secret={k}" for k in (secrets or ())), - f"+{target}", + '--buildkit-image=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub/earthly/buildkitd:v0.8.3', + *(f'--secret={k}' for k in (secrets or ())), + f'+{target}', # Use Amazon ECR as pull-through cache for DockerHub to avoid rate limits. - "--default_search_registry=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub", - *(f"--{arg}={val}" for arg, val in (args or {}).items()), + '--default_search_registry=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub', + *(f'--{arg}={val}' for arg, val in (args or {}).items()), ], command_type=EvgCommandType(kind), - include_expansions_in_env=["DOCKER_CONFIG"], + include_expansions_in_env=['DOCKER_CONFIG'], env=env if env else None, - working_dir="mongoc", + working_dir='mongoc', ) @@ -243,8 +243,8 @@ def earthly_task( earthly_args = config._asdict() earthly_args |= { # Add arguments that come from parameter expansions defined in the build variant - "env": f"${{{_ENV_PARAM_NAME}}}", - "c_compiler": f"${{{_CC_PARAM_NAME}}}", + 'env': f'${{{_ENV_PARAM_NAME}}}', + 'c_compiler': f'${{{_CC_PARAM_NAME}}}', } return EvgTask( name=name, @@ -255,29 +255,29 @@ def earthly_task( # for timing and logging purposes. The subequent build step will cache-hit the # warmed-up build environments. 
earthly_exec( - kind="setup", - target="env-warmup", + kind='setup', + target='env-warmup', args=earthly_args, ), # Now execute the main tasks: earthly_exec( - kind="test", - target="run", + kind='test', + target='run', # The "targets" arg is for +run to specify which targets to run - args={"targets": " ".join(targets)} | earthly_args, + args={'targets': ' '.join(targets)} | earthly_args, ), ], # type: ignore (The type annots on `commands` is wrong) - tags=["earthly", "pr-merge-gate", *env_tags], + tags=['earthly', 'pr-merge-gate', *env_tags], run_on=CONTAINER_RUN_DISTROS, ) CONTAINER_RUN_DISTROS = [ - "amazon2", - "debian11-large", - "debian12-large", - "ubuntu2204-large", - "ubuntu2404-large", + 'amazon2', + 'debian11-large', + 'debian12-large', + 'ubuntu2204-large', + 'ubuntu2404-large', ] @@ -288,14 +288,14 @@ def functions(): def tasks() -> Iterable[EvgTask]: for conf in all_possible(Configuration): # test-example is a target in all configurations - targets = ["test-example"] + targets = ['test-example'] # test-cxx-driver is only a target in configurations with specified mongocxx versions - if conf.test_mongocxx_ref != "none": - targets.append("test-cxx-driver") + if conf.test_mongocxx_ref != 'none': + targets.append('test-cxx-driver') task = earthly_task( - name=f"check:{conf.suffix}", + name=f'check:{conf.suffix}', targets=targets, config=conf, ) @@ -303,12 +303,12 @@ def tasks() -> Iterable[EvgTask]: yield task yield EvgTask( - name="verify-headers", + name='verify-headers', commands=[ DockerLoginAmazonECR.call(), - earthly_exec(kind="test", target="verify-headers"), + earthly_exec(kind='test', target='verify-headers'), ], - tags=["pr-merge-gate"], + tags=['pr-merge-gate'], run_on=CONTAINER_RUN_DISTROS, ) diff --git a/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py b/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py index d371571dd10..ab0bc59e8ab 100644 --- 
a/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py +++ b/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py @@ -1,5 +1,4 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import expansions_update +from shrub.v3.evg_command import EvgCommandType, expansions_update from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -15,10 +14,7 @@ class BootstrapMongoOrchestration(Function): script='.evergreen/scripts/integration-tests.sh', add_expansions_to_env=True, ), - expansions_update( - command_type=command_type, - file='mongoc/mo-expansion.yml' - ), + expansions_update(command_type=command_type, file='mongoc/mo-expansion.yml'), ] diff --git a/.evergreen/config_generator/components/funcs/fetch_build.py b/.evergreen/config_generator/components/funcs/fetch_build.py index fcd29e3495e..9e9fad98e1e 100644 --- a/.evergreen/config_generator/components/funcs/fetch_build.py +++ b/.evergreen/config_generator/components/funcs/fetch_build.py @@ -1,6 +1,4 @@ -from shrub.v3.evg_command import archive_targz_extract -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_get +from shrub.v3.evg_command import EvgCommandType, archive_targz_extract, s3_get from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -24,11 +22,11 @@ class FetchBuild(Function): bash_exec( command_type=command_type, working_dir='mongoc', - script='''\ + script="""\ for file in $(find .evergreen/scripts -type f); do chmod +rx "$file" || exit done - ''' + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/fetch_det.py b/.evergreen/config_generator/components/funcs/fetch_det.py index 75c8c3bf5cb..806534676d2 100644 --- a/.evergreen/config_generator/components/funcs/fetch_det.py +++ b/.evergreen/config_generator/components/funcs/fetch_det.py @@ -1,5 +1,4 @@ -from shrub.v3.evg_command import 
EvgCommandType -from shrub.v3.evg_command import expansions_update +from shrub.v3.evg_command import EvgCommandType, expansions_update from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -10,29 +9,27 @@ class FetchDET(Function): commands = [ bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ if [[ ! -d drivers-evergreen-tools ]]; then git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git fi - ''', + """, ), - # Make shell scripts executable. bash_exec( command_type=EvgCommandType.SETUP, - working_dir="drivers-evergreen-tools", + working_dir='drivers-evergreen-tools', script='find .evergreen -type f -name "*.sh" -exec chmod +rx "{}" \;', ), - # python is used frequently enough by many tasks that it is worth # running find_python3 once here and reusing the result. bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ set -o errexit . drivers-evergreen-tools/.evergreen/find-python3.sh echo "PYTHON3_BINARY: $(find_python3)" >|python3_binary.yml - ''', + """, ), expansions_update( command_type=EvgCommandType.SETUP, diff --git a/.evergreen/config_generator/components/funcs/fetch_source.py b/.evergreen/config_generator/components/funcs/fetch_source.py index 81ff94983df..f8dd519803e 100644 --- a/.evergreen/config_generator/components/funcs/fetch_source.py +++ b/.evergreen/config_generator/components/funcs/fetch_source.py @@ -1,6 +1,4 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import git_get_project +from shrub.v3.evg_command import EvgCommandType, expansions_update, git_get_project from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -14,7 +12,7 @@ class FetchSource(Function): bash_exec( command_type=command_type, working_dir='mongoc', - script='''\ + script="""\ set -o errexit set -o pipefail if [ -n 
"${github_pr_number}" -o "${is_patch}" = "true" ]; then @@ -23,19 +21,18 @@ class FetchSource(Function): VERSION=latest fi echo "CURRENT_VERSION: $VERSION" > expansion.yml - ''' + """, ), - expansions_update(command_type=command_type, - file='mongoc/expansion.yml'), + expansions_update(command_type=command_type, file='mongoc/expansion.yml'), # Scripts may not be executable on Windows. bash_exec( command_type=EvgCommandType.SETUP, working_dir='mongoc', - script='''\ + script="""\ for file in $(find .evergreen/scripts -type f); do chmod +rx "$file" || exit done - ''' + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/find_cmake_latest.py b/.evergreen/config_generator/components/funcs/find_cmake_latest.py index 89387fbcf7b..34dbe0858f2 100644 --- a/.evergreen/config_generator/components/funcs/find_cmake_latest.py +++ b/.evergreen/config_generator/components/funcs/find_cmake_latest.py @@ -5,11 +5,11 @@ class FindCMakeLatest(Function): - ''' + """ Call `find_cmake_latest` in an attempt to download-and-build the latest CMake version as a Setup task with `retry_on_failure: true` prior to subsequent use of `find-cmake-latest.sh` by compile and build scripts. - ''' + """ name = 'find-cmake-latest' command_type = EvgCommandType.SETUP @@ -18,7 +18,7 @@ class FindCMakeLatest(Function): command_type=command_type, retry_on_failure=True, working_dir='mongoc', - script='. .evergreen/scripts/find-cmake-latest.sh && find_cmake_latest' + script='. 
.evergreen/scripts/find-cmake-latest.sh && find_cmake_latest', ), ] diff --git a/.evergreen/config_generator/components/funcs/restore_instance_profile.py b/.evergreen/config_generator/components/funcs/restore_instance_profile.py index 3d79e75c025..2b320513b93 100644 --- a/.evergreen/config_generator/components/funcs/restore_instance_profile.py +++ b/.evergreen/config_generator/components/funcs/restore_instance_profile.py @@ -6,7 +6,7 @@ class RestoreInstanceProfile(Function): name = 'restore-instance-profile' commands = [ bash_exec( - script='''\ + script="""\ # Restore the AWS Instance Profile that may have been removed in AWS tasks. if [[ ! -d drivers-evergreen-tools ]]; then @@ -30,7 +30,7 @@ class RestoreInstanceProfile(Function): exit 1 fi echo "restoring instance profile ... succeeded" - ''' + """ ), ] diff --git a/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py b/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py index 72e9d942975..8bed661a523 100644 --- a/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py +++ b/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py @@ -16,7 +16,7 @@ class RunMockKMSServers(Function): bash_exec( command_type=command_type, working_dir='drivers-evergreen-tools/.evergreen/csfle', - script='''\ + script="""\ set -o errexit echo "Preparing KMS TLS venv environment..." # TODO: remove this function along with the "run kms servers" function. @@ -33,13 +33,13 @@ class RunMockKMSServers(Function): deactivate fi echo "Preparing KMS TLS venv environment... done." - ''', + """, ), bash_exec( command_type=command_type, background=True, working_dir='drivers-evergreen-tools/.evergreen/csfle', - script='''\ + script="""\ set -o errexit echo "Starting mock KMS TLS servers..." . ./activate-kmstlsvenv.sh @@ -51,7 +51,7 @@ class RunMockKMSServers(Function): python -u kms_kmip_server.py & deactivate echo "Starting mock KMS TLS servers... done." 
- ''', + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/run_simple_http_server.py b/.evergreen/config_generator/components/funcs/run_simple_http_server.py index 7eafc13aad7..0e4413408d8 100644 --- a/.evergreen/config_generator/components/funcs/run_simple_http_server.py +++ b/.evergreen/config_generator/components/funcs/run_simple_http_server.py @@ -12,13 +12,13 @@ class RunSimpleHTTPServer(Function): command_type=command_type, background=True, working_dir='mongoc', - script='''\ + script="""\ set -o errexit echo "Starting simple HTTP server..." command -V "${PYTHON3_BINARY}" >/dev/null "${PYTHON3_BINARY}" .evergreen/scripts/simple_http_server.py echo "Starting simple HTTP server... done." - ''', + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/set_cache_dir.py b/.evergreen/config_generator/components/funcs/set_cache_dir.py index cd8d114c05d..e0584d3786e 100644 --- a/.evergreen/config_generator/components/funcs/set_cache_dir.py +++ b/.evergreen/config_generator/components/funcs/set_cache_dir.py @@ -1,15 +1,15 @@ +from shrub.v3.evg_command import EvgCommandType, expansions_update + from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec -from shrub.v3.evg_command import EvgCommandType, expansions_update - class SetCacheDir(Function): name = 'set-cache-dir' commands = [ bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ if [[ -n "$XDG_CACHE_DIR" ]]; then cache_dir="$XDG_CACHE_DIR" # XDG Base Directory specification. 
elif [[ -n "$LOCALAPPDATA" ]]; then @@ -30,12 +30,9 @@ class SetCacheDir(Function): cache_dir="$(cd "$cache_dir/mongo-c-driver" && pwd)" || exit printf "MONGO_C_DRIVER_CACHE_DIR: %s\\n" "$cache_dir" >|expansions.set-cache-dir.yml - ''', - ), - expansions_update( - command_type=EvgCommandType.SETUP, - file='expansions.set-cache-dir.yml' + """, ), + expansions_update(command_type=EvgCommandType.SETUP, file='expansions.set-cache-dir.yml'), ] diff --git a/.evergreen/config_generator/components/funcs/stop_load_balancer.py b/.evergreen/config_generator/components/funcs/stop_load_balancer.py index 5e867948ffe..a1d0f5030c4 100644 --- a/.evergreen/config_generator/components/funcs/stop_load_balancer.py +++ b/.evergreen/config_generator/components/funcs/stop_load_balancer.py @@ -6,7 +6,7 @@ class StopLoadBalancer(Function): name = 'stop-load-balancer' commands = [ bash_exec( - script='''\ + script="""\ # Only run if a load balancer was started. if [[ -z "${SINGLE_MONGOS_LB_URI}" ]]; then echo "OK - no load balancer running" @@ -15,7 +15,7 @@ class StopLoadBalancer(Function): if [[ -d drivers-evergreen-tools ]]; then cd drivers-evergreen-tools && .evergreen/run-load-balancer.sh stop fi - ''' + """ ), ] diff --git a/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py b/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py index 0e549e2a7cc..fb3126695f8 100644 --- a/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py +++ b/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py @@ -6,11 +6,11 @@ class StopMongoOrchestration(Function): name = 'stop-mongo-orchestration' commands = [ bash_exec( - script='''\ + script="""\ if [[ -d MO ]]; then cd MO && mongo-orchestration stop fi - ''' + """ ), ] diff --git a/.evergreen/config_generator/components/funcs/upload_build.py b/.evergreen/config_generator/components/funcs/upload_build.py index c0c3f58e277..9c852354d2a 100644 --- 
a/.evergreen/config_generator/components/funcs/upload_build.py +++ b/.evergreen/config_generator/components/funcs/upload_build.py @@ -1,6 +1,6 @@ +from shrub.v3.evg_command import archive_targz_pack, s3_put + from config_generator.etc.function import Function -from shrub.v3.evg_command import archive_targz_pack -from shrub.v3.evg_command import s3_put class UploadBuild(Function): diff --git a/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py b/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py index e9a4def310e..06bf736221c 100644 --- a/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py +++ b/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py @@ -1,5 +1,4 @@ -from shrub.v3.evg_command import archive_targz_pack -from shrub.v3.evg_command import s3_put +from shrub.v3.evg_command import archive_targz_pack, s3_put from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -10,7 +9,7 @@ class UploadMOArtifacts(Function): commands = [ bash_exec( working_dir='mongoc', - script='''\ + script="""\ set -o errexit declare dir="MO" if [[ -d "/cygdrive/c/data/mo" ]]; then @@ -19,7 +18,7 @@ class UploadMOArtifacts(Function): if [[ -d "$dir" ]]; then find "$dir" -name \\*.log | xargs tar czf mongodb-logs.tar.gz fi - ''' + """, ), s3_put( aws_key='${aws_key}', @@ -45,7 +44,7 @@ class UploadMOArtifacts(Function): ), bash_exec( working_dir='mongoc', - script='''\ + script="""\ set -o errexit # Find all core files from mongodb in orchestration and move to mongoc declare dir="MO" @@ -64,7 +63,7 @@ class UploadMOArtifacts(Function): mv "$core_file" . 
fi done - ''' + """, ), archive_targz_pack( target='mongo-coredumps.tgz', diff --git a/.evergreen/config_generator/components/funcs/upload_test_results.py b/.evergreen/config_generator/components/funcs/upload_test_results.py index dc756a2d92f..1084df056e9 100644 --- a/.evergreen/config_generator/components/funcs/upload_test_results.py +++ b/.evergreen/config_generator/components/funcs/upload_test_results.py @@ -9,14 +9,12 @@ class UploadTestResults(Function): commands = [ # Ensure attach_results does not fail even if no tests results exist. bash_exec( - script='''\ + script="""\ mkdir -p mongoc touch mongoc/test-results.json - ''' - ), - attach_results( - file_location='mongoc/test-results.json' + """ ), + attach_results(file_location='mongoc/test-results.json'), ] diff --git a/.evergreen/config_generator/components/kms_divergence_check.py b/.evergreen/config_generator/components/kms_divergence_check.py index 6cc7736d5d4..224d5406150 100644 --- a/.evergreen/config_generator/components/kms_divergence_check.py +++ b/.evergreen/config_generator/components/kms_divergence_check.py @@ -1,5 +1,4 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_put +from shrub.v3.evg_command import EvgCommandType, s3_put from shrub.v3.evg_task import EvgTask from config_generator.etc.function import Function @@ -7,12 +6,12 @@ class KmsDivergenceCheck(Function): - name = "kms-divergence-check" + name = 'kms-divergence-check' commands = [ bash_exec( command_type=EvgCommandType.TEST, - working_dir="mongoc", - script=".evergreen/scripts/kms-divergence-check.sh" + working_dir='mongoc', + script='.evergreen/scripts/kms-divergence-check.sh', ), ] diff --git a/.evergreen/config_generator/components/loadbalanced.py b/.evergreen/config_generator/components/loadbalanced.py index d15b8397749..c26afd5d230 100644 --- a/.evergreen/config_generator/components/loadbalanced.py +++ b/.evergreen/config_generator/components/loadbalanced.py @@ -1,6 +1,6 @@ from 
shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import EvgCommandType, FunctionCall, expansions_update -from shrub.v3.evg_task import EvgTask, EvgTaskRef, EvgTaskDependency +from shrub.v3.evg_task import EvgTask, EvgTaskDependency, EvgTaskRef from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild @@ -9,12 +9,12 @@ from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_tests import RunTests from config_generator.components.funcs.upload_build import UploadBuild -from config_generator.etc.distros import make_distro_str, find_small_distro, find_large_distro +from config_generator.etc.distros import find_large_distro, find_small_distro, make_distro_str from config_generator.etc.utils import bash_exec # Use `rhel8-latest` distro. `rhel8-latest` distro includes necessary dependency: `haproxy`. 
-_DISTRO_NAME = "rhel8-latest" -_COMPILER = "gcc" +_DISTRO_NAME = 'rhel8-latest' +_COMPILER = 'gcc' def functions(): @@ -22,50 +22,51 @@ def functions(): 'start-load-balancer': [ bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ export DRIVERS_TOOLS=./drivers-evergreen-tools export MONGODB_URI="${MONGODB_URI}" $DRIVERS_TOOLS/.evergreen/run-load-balancer.sh start - ''', + """, ), expansions_update( command_type=EvgCommandType.SETUP, file='lb-expansion.yml', - ) + ), ] } def make_test_task(auth: bool, ssl: bool, server_version: str): - auth_str = "auth" if auth else "noauth" - ssl_str = "openssl" if ssl else "nossl" + auth_str = 'auth' if auth else 'noauth' + ssl_str = 'openssl' if ssl else 'nossl' distro_str = make_distro_str(_DISTRO_NAME, _COMPILER, None) return EvgTask( - name=f"loadbalanced-{distro_str}-test-{server_version}-{auth_str}-{ssl_str}", - depends_on=[EvgTaskDependency( - name=f"loadbalanced-{distro_str}-compile")], - run_on=find_large_distro(_DISTRO_NAME).name, # DEVPROD-18763 + name=f'loadbalanced-{distro_str}-test-{server_version}-{auth_str}-{ssl_str}', + depends_on=[EvgTaskDependency(name=f'loadbalanced-{distro_str}-compile')], + run_on=find_large_distro(_DISTRO_NAME).name, # DEVPROD-18763 tags=['loadbalanced', _DISTRO_NAME, _COMPILER, auth_str, ssl_str], commands=[ - FetchBuild.call(build_name=f"loadbalanced-{distro_str}-compile"), + FetchBuild.call(build_name=f'loadbalanced-{distro_str}-compile'), FetchDET.call(), - BootstrapMongoOrchestration().call(vars={ - 'AUTH': auth_str, - 'SSL': ssl_str, - 'MONGODB_VERSION': server_version, - 'TOPOLOGY': 'sharded_cluster', - 'LOAD_BALANCER': 'on', - }), + BootstrapMongoOrchestration().call( + vars={ + 'AUTH': auth_str, + 'SSL': ssl_str, + 'MONGODB_VERSION': server_version, + 'TOPOLOGY': 'sharded_cluster', + 'LOAD_BALANCER': 'on', + } + ), RunSimpleHTTPServer.call(), - FunctionCall(func='start-load-balancer', vars={ - 'MONGODB_URI': 'mongodb://localhost:27017,localhost:27018' - }), - 
RunTests().call(vars={ - 'AUTH': auth_str, - 'SSL': ssl_str, - 'LOADBALANCED': 'loadbalanced', - 'CC': _COMPILER, - }) + FunctionCall(func='start-load-balancer', vars={'MONGODB_URI': 'mongodb://localhost:27017,localhost:27018'}), + RunTests().call( + vars={ + 'AUTH': auth_str, + 'SSL': ssl_str, + 'LOADBALANCED': 'loadbalanced', + 'CC': _COMPILER, + } + ), ], ) @@ -73,22 +74,18 @@ def make_test_task(auth: bool, ssl: bool, server_version: str): def tasks(): distro_str = make_distro_str(_DISTRO_NAME, _COMPILER, None) yield EvgTask( - name=f"loadbalanced-{distro_str}-compile", + name=f'loadbalanced-{distro_str}-compile', run_on=find_large_distro(_DISTRO_NAME).name, tags=['loadbalanced', _DISTRO_NAME, _COMPILER], commands=[ FindCMakeLatest.call(), bash_exec( command_type=EvgCommandType.TEST, - env={ - 'CC': _COMPILER, - 'CFLAGS': '-fno-omit-frame-pointer', - 'SSL': 'OPENSSL' - }, + env={'CC': _COMPILER, 'CFLAGS': '-fno-omit-frame-pointer', 'SSL': 'OPENSSL'}, working_dir='mongoc', script='.evergreen/scripts/compile.sh', ), - UploadBuild.call() + UploadBuild.call(), ], ) @@ -107,9 +104,5 @@ def tasks(): def variants(): return [ - BuildVariant( - name="loadbalanced", - display_name="loadbalanced", - tasks=[EvgTaskRef(name='.loadbalanced')] - ), + BuildVariant(name='loadbalanced', display_name='loadbalanced', tasks=[EvgTaskRef(name='.loadbalanced')]), ] diff --git a/.evergreen/config_generator/components/make_docs.py b/.evergreen/config_generator/components/make_docs.py index aa49b6532d5..a149261961b 100644 --- a/.evergreen/config_generator/components/make_docs.py +++ b/.evergreen/config_generator/components/make_docs.py @@ -1,21 +1,18 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_put +from shrub.v3.evg_command import EvgCommandType, s3_put from shrub.v3.evg_task import EvgTask from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest - -from config_generator.etc.function import Function -from 
config_generator.etc.function import merge_defns +from config_generator.etc.function import Function, merge_defns from config_generator.etc.utils import bash_exec class MakeDocs(Function): - name = "make-docs" + name = 'make-docs' commands = [ bash_exec( command_type=EvgCommandType.TEST, - working_dir="mongoc", - include_expansions_in_env=["distro_id"], + working_dir='mongoc', + include_expansions_in_env=['distro_id'], script="""\ # See SphinxBuild.cmake for EVG_DOCS_BUILD reasoning uv run --frozen --only-group docs env EVG_DOCS_BUILD=1 .evergreen/scripts/build-docs.sh @@ -25,52 +22,52 @@ class MakeDocs(Function): class UploadDocs(Function): - name = "upload-docs" + name = 'upload-docs' commands = [ bash_exec( - working_dir="mongoc/_build/for-docs/src/libbson", + working_dir='mongoc/_build/for-docs/src/libbson', env={ - "AWS_ACCESS_KEY_ID": "${aws_key}", - "AWS_SECRET_ACCESS_KEY": "${aws_secret}", + 'AWS_ACCESS_KEY_ID': '${aws_key}', + 'AWS_SECRET_ACCESS_KEY': '${aws_secret}', }, - script="aws s3 cp doc/html s3://mciuploads/${project}/docs/libbson/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1", + script='aws s3 cp doc/html s3://mciuploads/${project}/docs/libbson/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1', ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libbson docs", - local_file="mongoc/_build/for-docs/src/libbson/doc/html/index.html", - permissions="public-read", - remote_file="${project}/docs/libbson/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libbson docs', + local_file='mongoc/_build/for-docs/src/libbson/doc/html/index.html', + permissions='public-read', + remote_file='${project}/docs/libbson/${CURRENT_VERSION}/index.html', ), bash_exec( - working_dir="mongoc/_build/for-docs/src/libmongoc", + 
working_dir='mongoc/_build/for-docs/src/libmongoc', env={ - "AWS_ACCESS_KEY_ID": "${aws_key}", - "AWS_SECRET_ACCESS_KEY": "${aws_secret}", + 'AWS_ACCESS_KEY_ID': '${aws_key}', + 'AWS_SECRET_ACCESS_KEY': '${aws_secret}', }, - script="aws s3 cp doc/html s3://mciuploads/${project}/docs/libmongoc/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1", + script='aws s3 cp doc/html s3://mciuploads/${project}/docs/libmongoc/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1', ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libmongoc docs", - local_file="mongoc/_build/for-docs/src/libmongoc/doc/html/index.html", - permissions="public-read", - remote_file="${project}/docs/libmongoc/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libmongoc docs', + local_file='mongoc/_build/for-docs/src/libmongoc/doc/html/index.html', + permissions='public-read', + remote_file='${project}/docs/libmongoc/${CURRENT_VERSION}/index.html', ), ] class UploadManPages(Function): - name = "upload-man-pages" + name = 'upload-man-pages' commands = [ bash_exec( - working_dir="mongoc", + working_dir='mongoc', silent=True, script="""\ set -o errexit @@ -85,24 +82,24 @@ class UploadManPages(Function): """, ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libbson man pages", - local_file="mongoc/bson-man-pages.html", - permissions="public-read", - remote_file="${project}/man-pages/libbson/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libbson man pages', + local_file='mongoc/bson-man-pages.html', + permissions='public-read', + 
remote_file='${project}/man-pages/libbson/${CURRENT_VERSION}/index.html', ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libmongoc man pages", - local_file="mongoc/mongoc-man-pages.html", - permissions="public-read", - remote_file="${project}/man-pages/libmongoc/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libmongoc man pages', + local_file='mongoc/mongoc-man-pages.html', + permissions='public-read', + remote_file='${project}/man-pages/libmongoc/${CURRENT_VERSION}/index.html', ), ] @@ -118,7 +115,7 @@ def functions(): def tasks(): return [ EvgTask( - name="make-docs", + name='make-docs', commands=[ FindCMakeLatest.call(), MakeDocs.call(), diff --git a/.evergreen/config_generator/components/mock_server.py b/.evergreen/config_generator/components/mock_server.py index b103cda65d6..e21fab98ddb 100644 --- a/.evergreen/config_generator/components/mock_server.py +++ b/.evergreen/config_generator/components/mock_server.py @@ -7,11 +7,12 @@ from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.etc.utils import bash_exec + def tasks(): return [ EvgTask( - name="mock-server-test", - run_on="ubuntu2204-small", + name='mock-server-test', + run_on='ubuntu2204-small', commands=[ # Call fetch-det to define PYTHON3_BINARY expansion required for run-simple-http-server. 
FetchDET.call(), @@ -27,22 +28,23 @@ def tasks(): command_type=EvgCommandType.TEST, working_dir='mongoc', script='.evergreen/scripts/run-mock-server-tests.sh', - ) + ), ], ) ] + def variants(): return [ BuildVariant( - name="mock-server-test", - display_name="Mock Server Test", + name='mock-server-test', + display_name='Mock Server Test', tasks=[EvgTaskRef(name='mock-server-test')], expansions={ 'CC': 'gcc', 'ASAN': 'on', 'CFLAGS': '-fno-omit-frame-pointer', 'SANITIZE': 'address,undefined', - } + }, ), ] diff --git a/.evergreen/config_generator/components/openssl_compat.py b/.evergreen/config_generator/components/openssl_compat.py index 119e7d2c20b..7095e7197e6 100644 --- a/.evergreen/config_generator/components/openssl_compat.py +++ b/.evergreen/config_generator/components/openssl_compat.py @@ -1,15 +1,14 @@ -from config_generator.etc.distros import find_large_distro, make_distro_str -from config_generator.etc.function import Function -from config_generator.etc.utils import bash_exec - -from config_generator.components.funcs.fetch_source import FetchSource -from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest +from itertools import product from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import EvgCommandType, FunctionCall from shrub.v3.evg_task import EvgTask, EvgTaskRef -from itertools import product +from config_generator.components.funcs.fetch_source import FetchSource +from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest +from config_generator.etc.distros import find_large_distro, make_distro_str +from config_generator.etc.function import Function +from config_generator.etc.utils import bash_exec TAG = 'openssl-compat' @@ -73,7 +72,7 @@ def tasks(): FetchSource.call(), FindCMakeLatest.call(), OpenSSLSetup.call(vars=vars), - FunctionCall(func="run auth tests"), + FunctionCall(func='run auth tests'), ], ) @@ -94,7 +93,7 @@ def tasks(): FetchSource.call(), FindCMakeLatest.call(), 
OpenSSLSetup.call(vars=vars), - FunctionCall(func="run auth tests"), + FunctionCall(func='run auth tests'), ], ) diff --git a/.evergreen/config_generator/components/sanitizers/asan.py b/.evergreen/config_generator/components/sanitizers/asan.py index b55d161239f..db40ffd0367 100644 --- a/.evergreen/config_generator/components/sanitizers/asan.py +++ b/.evergreen/config_generator/components/sanitizers/asan.py @@ -1,7 +1,6 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef - TAG = 'sanitizers-matrix-asan' diff --git a/.evergreen/config_generator/components/sanitizers/asan_cse.py b/.evergreen/config_generator/components/sanitizers/asan_cse.py index c597c48ad37..82a55c61320 100644 --- a/.evergreen/config_generator/components/sanitizers/asan_cse.py +++ b/.evergreen/config_generator/components/sanitizers/asan_cse.py @@ -1,11 +1,7 @@ -from config_generator.etc.compile import generate_compile_tasks - -from config_generator.etc.sanitizers.test import generate_test_tasks - from config_generator.components.cse.openssl import SaslCyrusOpenSSLCompile - from config_generator.components.sanitizers.asan import TAG - +from config_generator.etc.compile import generate_compile_tasks +from config_generator.etc.sanitizers.test import generate_test_tasks # pylint: disable=line-too-long # fmt: off @@ -33,16 +29,12 @@ def tasks(): 'cyrus': SaslCyrusOpenSSLCompile, } - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += generate_test_tasks(SSL, TAG, TEST_MATRIX, MORE_TAGS) res += generate_test_tasks( - SSL, TAG, TEST_MATRIX, MORE_TAGS, - MORE_TEST_TAGS=['with-mongocrypt'], - MORE_VARS={'SKIP_CRYPT_SHARED_LIB': 'on'} + SSL, TAG, TEST_MATRIX, MORE_TAGS, MORE_TEST_TAGS=['with-mongocrypt'], MORE_VARS={'SKIP_CRYPT_SHARED_LIB': 'on'} ) return res diff --git a/.evergreen/config_generator/components/sanitizers/asan_sasl.py 
b/.evergreen/config_generator/components/sanitizers/asan_sasl.py index 0275869de9b..1e05aabeb7e 100644 --- a/.evergreen/config_generator/components/sanitizers/asan_sasl.py +++ b/.evergreen/config_generator/components/sanitizers/asan_sasl.py @@ -1,12 +1,8 @@ +from config_generator.components.sanitizers.asan import TAG +from config_generator.components.sasl.openssl import SaslCyrusOpenSSLCompile from config_generator.etc.compile import generate_compile_tasks - from config_generator.etc.sanitizers.test import generate_test_tasks -from config_generator.components.sasl.openssl import SaslCyrusOpenSSLCompile - -from config_generator.components.sanitizers.asan import TAG - - # pylint: disable=line-too-long # fmt: off COMPILE_MATRIX = [ @@ -30,9 +26,7 @@ def tasks(): 'cyrus': SaslCyrusOpenSSLCompile, } - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, ['asan'] - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, ['asan']) res += generate_test_tasks(SSL, TAG, TEST_MATRIX, ['asan']) diff --git a/.evergreen/config_generator/components/sanitizers/tsan.py b/.evergreen/config_generator/components/sanitizers/tsan.py index d506f43b887..0c54fa8f64a 100644 --- a/.evergreen/config_generator/components/sanitizers/tsan.py +++ b/.evergreen/config_generator/components/sanitizers/tsan.py @@ -1,7 +1,6 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef - TAG = 'sanitizers-matrix-tsan' diff --git a/.evergreen/config_generator/components/sanitizers/tsan_sasl.py b/.evergreen/config_generator/components/sanitizers/tsan_sasl.py index 305326d10b9..7364c4d9255 100644 --- a/.evergreen/config_generator/components/sanitizers/tsan_sasl.py +++ b/.evergreen/config_generator/components/sanitizers/tsan_sasl.py @@ -1,11 +1,7 @@ -from config_generator.etc.compile import generate_compile_tasks - -from config_generator.etc.sanitizers.test import generate_test_tasks - -from config_generator.components.sasl.openssl 
import SaslCyrusOpenSSLCompile - from config_generator.components.sanitizers.tsan import TAG - +from config_generator.components.sasl.openssl import SaslCyrusOpenSSLCompile +from config_generator.etc.compile import generate_compile_tasks +from config_generator.etc.sanitizers.test import generate_test_tasks # pylint: disable=line-too-long # fmt: off @@ -30,9 +26,7 @@ def tasks(): SSL = 'openssl' SASL_TO_FUNC = {'cyrus': SaslCyrusOpenSSLCompile} - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += generate_test_tasks(SSL, TAG, TEST_OPENSSL_MATRIX, MORE_TAGS) diff --git a/.evergreen/config_generator/components/sasl/darwinssl.py b/.evergreen/config_generator/components/sasl/darwinssl.py index 89d91b6412c..2fe81fd8e43 100644 --- a/.evergreen/config_generator/components/sasl/darwinssl.py +++ b/.evergreen/config_generator/components/sasl/darwinssl.py @@ -1,13 +1,11 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - SSL = 'darwinssl' TAG = f'sasl-matrix-{SSL}' diff --git a/.evergreen/config_generator/components/sasl/nossl.py b/.evergreen/config_generator/components/sasl/nossl.py index 2f8b7a98037..2f9410a00a5 100644 --- a/.evergreen/config_generator/components/sasl/nossl.py +++ b/.evergreen/config_generator/components/sasl/nossl.py @@ -1,13 +1,11 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import 
merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - SSL = 'nossl' TAG = f'sasl-matrix-{SSL}' diff --git a/.evergreen/config_generator/components/sasl/openssl.py b/.evergreen/config_generator/components/sasl/openssl.py index 40541ddd56f..c880c98a295 100644 --- a/.evergreen/config_generator/components/sasl/openssl.py +++ b/.evergreen/config_generator/components/sasl/openssl.py @@ -1,12 +1,10 @@ from shrub.v3.evg_build_variant import BuildVariant -from config_generator.etc.utils import TaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - +from config_generator.etc.utils import TaskRef SSL = 'openssl' TAG = f'sasl-matrix-{SSL}' @@ -79,7 +77,7 @@ def tasks(): # PowerPC and zSeries are limited resources. for task in res: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): task.patchable = False return res @@ -92,11 +90,11 @@ def variants(): # PowerPC and zSeries are limited resources. 
for task in TASKS: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): tasks.append( TaskRef( name=task.name, - batchtime=1440, # 1 day + batchtime=1440, # 1 day ) ) else: diff --git a/.evergreen/config_generator/components/sasl/winssl.py b/.evergreen/config_generator/components/sasl/winssl.py index 3c56cfd036b..fe3655e183d 100644 --- a/.evergreen/config_generator/components/sasl/winssl.py +++ b/.evergreen/config_generator/components/sasl/winssl.py @@ -1,13 +1,11 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - SSL = 'winssl' TAG = f'sasl-matrix-{SSL}' diff --git a/.evergreen/config_generator/components/sbom.py b/.evergreen/config_generator/components/sbom.py index dd2af1b6ed9..5a510b8e060 100644 --- a/.evergreen/config_generator/components/sbom.py +++ b/.evergreen/config_generator/components/sbom.py @@ -1,7 +1,4 @@ -from config_generator.etc.distros import find_small_distro -from config_generator.etc.function import Function, merge_defns -from config_generator.etc.utils import bash_exec - +from pydantic import ConfigDict from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import ( BuiltInCommand, @@ -13,8 +10,9 @@ ) from shrub.v3.evg_task import EvgTask, EvgTaskRef -from pydantic import ConfigDict - +from config_generator.etc.distros import find_small_distro +from config_generator.etc.function import Function, merge_defns +from config_generator.etc.utils import bash_exec TAG = 'sbom' @@ -40,12 +38,12 @@ class SBOM(Function): 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN', ], - script='''\ + script="""\ set -o 
errexit set -o pipefail kondukto_token="$(aws secretsmanager get-secret-value --secret-id "kondukto-token" --region "us-east-1" --query 'SecretString' --output text)" printf "KONDUKTO_TOKEN: %s\\n" "$kondukto_token" >|expansions.kondukto.yml - ''', + """, ), expansions_update( command_type=EvgCommandType.SETUP, @@ -57,14 +55,14 @@ class SBOM(Function): # Avoid inadvertently using a pre-existing and potentially conflicting Podman config. # Note: podman understands and uses DOCKER_CONFIG despite the name. expansions_update(updates=[KeyValueParam(key='DOCKER_CONFIG', value='${workdir}/.docker')]), - ec2_assume_role(role_arn="arn:aws:iam::901841024863:role/ecr-role-evergreen-ro"), + ec2_assume_role(role_arn='arn:aws:iam::901841024863:role/ecr-role-evergreen-ro'), bash_exec( command_type=EvgCommandType.SETUP, include_expansions_in_env=[ - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - "AWS_SESSION_TOKEN", - "DOCKER_CONFIG", + 'AWS_ACCESS_KEY_ID', + 'AWS_SECRET_ACCESS_KEY', + 'AWS_SESSION_TOKEN', + 'DOCKER_CONFIG', ], script='aws ecr get-login-password --region us-east-1 | podman login --username AWS --password-stdin 901841024863.dkr.ecr.us-east-1.amazonaws.com', ), @@ -75,7 +73,7 @@ class SBOM(Function): include_expansions_in_env=[ 'branch_name', 'DOCKER_CONFIG', - "KONDUKTO_TOKEN", + 'KONDUKTO_TOKEN', ], script='.evergreen/scripts/sbom.sh', ), diff --git a/.evergreen/config_generator/components/scan_build.py b/.evergreen/config_generator/components/scan_build.py index cace4f67b5f..90af7178f2f 100644 --- a/.evergreen/config_generator/components/scan_build.py +++ b/.evergreen/config_generator/components/scan_build.py @@ -1,17 +1,12 @@ from shrub.v3.evg_build_variant import BuildVariant -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import FunctionCall +from shrub.v3.evg_command import EvgCommandType, FunctionCall from shrub.v3.evg_task import EvgTask, EvgTaskRef from config_generator.components.funcs.find_cmake_latest import 
FindCMakeLatest - -from config_generator.etc.distros import find_large_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars +from config_generator.etc.distros import compiler_to_vars, find_large_distro, make_distro_str from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec - TAG = 'scan-build-matrix' diff --git a/.evergreen/config_generator/etc/compile.py b/.evergreen/config_generator/etc/compile.py index 6e72b5757a4..1bcc037af7f 100644 --- a/.evergreen/config_generator/etc/compile.py +++ b/.evergreen/config_generator/etc/compile.py @@ -1,11 +1,8 @@ from shrub.v3.evg_task import EvgTask -from config_generator.etc.distros import find_large_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars - from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest from config_generator.components.funcs.upload_build import UploadBuild +from config_generator.etc.distros import compiler_to_vars, find_large_distro, make_distro_str def generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, MATRIX, MORE_TAGS=None, MORE_VARS=None): @@ -14,7 +11,7 @@ def generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, MATRIX, MORE_TAGS=None, MORE_ MORE_TAGS = MORE_TAGS if MORE_TAGS else [] MORE_VARS = MORE_VARS if MORE_VARS else {} - for distro_name, compiler, arch, sasls, in MATRIX: + for distro_name, compiler, arch, sasls in MATRIX: tags = [TAG, 'compile', distro_name, compiler] + MORE_TAGS distro = find_large_distro(distro_name) diff --git a/.evergreen/config_generator/etc/cse/compile.py b/.evergreen/config_generator/etc/cse/compile.py index 47ced275552..2827c3146fa 100644 --- a/.evergreen/config_generator/etc/cse/compile.py +++ b/.evergreen/config_generator/etc/cse/compile.py @@ -1,13 +1,9 @@ from typing import ClassVar -from shrub.v3.evg_command import EvgCommand -from shrub.v3.evg_command import 
EvgCommandType -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import KeyValueParam - -from config_generator.etc.utils import bash_exec +from shrub.v3.evg_command import EvgCommand, EvgCommandType, KeyValueParam, expansions_update from config_generator.etc.function import Function +from config_generator.etc.utils import bash_exec class CompileCommon(Function): diff --git a/.evergreen/config_generator/etc/cse/test.py b/.evergreen/config_generator/etc/cse/test.py index ae2fe60de1b..41ecf1032f2 100644 --- a/.evergreen/config_generator/etc/cse/test.py +++ b/.evergreen/config_generator/etc/cse/test.py @@ -1,18 +1,14 @@ from itertools import product -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import KeyValueParam +from shrub.v3.evg_command import KeyValueParam, expansions_update from shrub.v3.evg_task import EvgTask, EvgTaskDependency -from config_generator.etc.distros import find_large_distro, find_small_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars - from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild from config_generator.components.funcs.fetch_det import FetchDET from config_generator.components.funcs.run_mock_kms_servers import RunMockKMSServers from config_generator.components.funcs.run_tests import RunTests +from config_generator.etc.distros import compiler_to_vars, find_large_distro, find_small_distro, make_distro_str def generate_test_tasks(SSL, TAG, MATRIX): @@ -27,7 +23,7 @@ def generate_test_tasks(SSL, TAG, MATRIX): for distro_name, compiler, arch, sasl, auths, topologies, server_vers in MATRIX: tags = [TAG, 'test', distro_name, compiler, f'sasl-{sasl}', 'cse'] if distro_name == 'rhel8-latest': - test_distro = find_large_distro(distro_name) # DEVPROD-18763 + test_distro = 
find_large_distro(distro_name) # DEVPROD-18763 else: test_distro = find_small_distro(distro_name) diff --git a/.evergreen/config_generator/etc/distros.py b/.evergreen/config_generator/etc/distros.py index d4cb1c0c1fb..bef2f731700 100644 --- a/.evergreen/config_generator/etc/distros.py +++ b/.evergreen/config_generator/etc/distros.py @@ -1,7 +1,7 @@ from typing import Literal -from pydantic import BaseModel, validator from packaging.version import Version +from pydantic import BaseModel, validator class Distro(BaseModel): @@ -21,14 +21,17 @@ class Distro(BaseModel): os: str | None = None os_type: Literal['linux', 'macos', 'windows'] | None = None os_ver: str | None = None - vs_ver: Literal[ - '2013', - '2015', - '2017', - '2019', - '2022', - 'vsCurrent', - ] | None = None + vs_ver: ( + Literal[ + '2013', + '2015', + '2017', + '2019', + '2022', + 'vsCurrent', + ] + | None + ) = None size: Literal['small', 'large'] | None = None arch: Literal['arm64', 'power', 'zseries'] | None = None @@ -61,7 +64,6 @@ def ls_distro(name, **kwargs): RHEL_DISTROS = [ *ls_distro(name='rhel7-latest', os='rhel', os_type='linux', os_ver='7'), *ls_distro(name='rhel8-latest', os='rhel', os_type='linux', os_ver='8'), - *ls_distro(name='rhel80', os='rhel', os_type='linux', os_ver='8.0'), *ls_distro(name='rhel84', os='rhel', os_type='linux', os_ver='8.4'), *ls_distro(name='rhel90', os='rhel', os_type='linux', os_ver='9.0'), @@ -69,7 +71,7 @@ def ls_distro(name, **kwargs): *ls_distro(name='rhel92', os='rhel', os_type='linux', os_ver='9.2'), *ls_distro(name='rhel93', os='rhel', os_type='linux', os_ver='9.3'), *ls_distro(name='rhel94', os='rhel', os_type='linux', os_ver='9.4'), - *ls_distro(name='rhel95', os='rhel', os_type='linux', os_ver='9.5'), # rhel9-latest + *ls_distro(name='rhel95', os='rhel', os_type='linux', os_ver='9.5'), # rhel9-latest ] RHEL_POWER_DISTROS = [ @@ -150,14 +152,13 @@ def make_distro_str(distro_name, compiler, arch) -> str: # ('windows-vsCurrent-2022', 'mingw', None) -> 
windows-2022-mingw # ('windows-vsCurrent', 'vs2017x64', None) -> windows-2019-vs2017-x64 # ('windows-vsCurrent', 'mingw', None) -> windows-2019-mingw - maybe_arch = compiler[len('vs20XY'):] + maybe_arch = compiler[len('vs20XY') :] if maybe_arch in ('x86', 'x64'): - compiler_str = compiler[:-len(maybe_arch)] + '-' + maybe_arch + compiler_str = compiler[: -len(maybe_arch)] + '-' + maybe_arch else: compiler_str = compiler if distro_name.startswith('windows-vsCurrent-'): - distro_str = 'windows-' + \ - distro_name[len('windows-vsCurrent-'):] + f'-{compiler_str}' + distro_str = 'windows-' + distro_name[len('windows-vsCurrent-') :] + f'-{compiler_str}' else: distro_str = 'windows-2019-' + compiler_str else: diff --git a/.evergreen/config_generator/etc/function.py b/.evergreen/config_generator/etc/function.py index 61398dced5d..aa9ec13da5e 100644 --- a/.evergreen/config_generator/etc/function.py +++ b/.evergreen/config_generator/etc/function.py @@ -1,9 +1,7 @@ -from typing import ClassVar -from typing import Mapping from collections import ChainMap +from typing import ClassVar, Mapping -from shrub.v3.evg_command import EvgCommand -from shrub.v3.evg_command import FunctionCall +from shrub.v3.evg_command import EvgCommand, FunctionCall class Function: diff --git a/.evergreen/config_generator/etc/sanitizers/test.py b/.evergreen/config_generator/etc/sanitizers/test.py index e8cf927ec4f..9042082114e 100644 --- a/.evergreen/config_generator/etc/sanitizers/test.py +++ b/.evergreen/config_generator/etc/sanitizers/test.py @@ -1,19 +1,15 @@ from itertools import product -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import KeyValueParam +from shrub.v3.evg_command import KeyValueParam, expansions_update from shrub.v3.evg_task import EvgTask, EvgTaskDependency -from config_generator.etc.distros import find_large_distro, find_small_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars 
- from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild from config_generator.components.funcs.fetch_det import FetchDET -from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_mock_kms_servers import RunMockKMSServers +from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_tests import RunTests +from config_generator.etc.distros import compiler_to_vars, find_large_distro, find_small_distro, make_distro_str def generate_test_tasks(SSL, TAG, MATRIX, MORE_COMPILE_TAGS=None, MORE_TEST_TAGS=None, MORE_VARS=None): @@ -30,12 +26,10 @@ def generate_test_tasks(SSL, TAG, MATRIX, MORE_COMPILE_TAGS=None, MORE_TEST_TAGS MORE_VARS = MORE_VARS if MORE_VARS else {} for distro_name, compiler, arch, sasl, auths, topologies, server_vers in MATRIX: - tags = [ - TAG, 'test', distro_name, compiler, f'sasl-{sasl}' - ] + MORE_COMPILE_TAGS + tags = [TAG, 'test', distro_name, compiler, f'sasl-{sasl}'] + MORE_COMPILE_TAGS if distro_name == 'rhel8-latest': - test_distro = find_large_distro(distro_name) # DEVPROD-18763 + test_distro = find_large_distro(distro_name) # DEVPROD-18763 else: test_distro = find_small_distro(distro_name) @@ -77,9 +71,7 @@ def generate_test_tasks(SSL, TAG, MATRIX, MORE_COMPILE_TAGS=None, MORE_TEST_TAGS ] if 'cse' in MORE_COMPILE_TAGS: - updates.append( - KeyValueParam(key='CLIENT_SIDE_ENCRYPTION', value='on') - ) + updates.append(KeyValueParam(key='CLIENT_SIDE_ENCRYPTION', value='on')) for key, value in MORE_VARS.items(): updates.append(KeyValueParam(key=key, value=value)) diff --git a/.evergreen/config_generator/etc/sasl/compile.py b/.evergreen/config_generator/etc/sasl/compile.py index f3246728010..05846e88595 100644 --- a/.evergreen/config_generator/etc/sasl/compile.py +++ 
b/.evergreen/config_generator/etc/sasl/compile.py @@ -1,11 +1,9 @@ from typing import ClassVar -from shrub.v3.evg_command import EvgCommand -from shrub.v3.evg_command import EvgCommandType - -from config_generator.etc.utils import bash_exec +from shrub.v3.evg_command import EvgCommand, EvgCommandType from config_generator.etc.function import Function +from config_generator.etc.utils import bash_exec class CompileCommon(Function): diff --git a/.evergreen/config_generator/etc/sasl/test.py b/.evergreen/config_generator/etc/sasl/test.py index 661656f9899..f6a56cd725e 100644 --- a/.evergreen/config_generator/etc/sasl/test.py +++ b/.evergreen/config_generator/etc/sasl/test.py @@ -1,18 +1,14 @@ from itertools import product -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import KeyValueParam +from shrub.v3.evg_command import KeyValueParam, expansions_update from shrub.v3.evg_task import EvgTask, EvgTaskDependency -from config_generator.etc.distros import find_large_distro, find_small_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars - from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild from config_generator.components.funcs.fetch_det import FetchDET from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_tests import RunTests +from config_generator.etc.distros import compiler_to_vars, find_large_distro, find_small_distro, make_distro_str def generate_test_tasks(SSL, TAG, MATRIX): @@ -27,7 +23,7 @@ def generate_test_tasks(SSL, TAG, MATRIX): for distro_name, compiler, arch, sasl, auths, topologies, server_vers in MATRIX: tags = [TAG, 'test', distro_name, compiler] if distro_name == 'rhel8-latest': - test_distro = find_large_distro(distro_name) # DEVPROD-18763 + test_distro = 
find_large_distro(distro_name) # DEVPROD-18763 else: test_distro = find_small_distro(distro_name) diff --git a/.evergreen/config_generator/etc/utils.py b/.evergreen/config_generator/etc/utils.py index 0dd18327883..3a972d996dd 100644 --- a/.evergreen/config_generator/etc/utils.py +++ b/.evergreen/config_generator/etc/utils.py @@ -3,14 +3,13 @@ from inspect import isclass from pathlib import Path from textwrap import dedent -from typing import (Any, Iterable, Literal, Mapping, Type, TypeVar, - Union, cast) +from typing import Any, Iterable, Literal, Mapping, Type, TypeVar, Union, cast import yaml from shrub.v3.evg_command import EvgCommandType, subprocess_exec from shrub.v3.evg_project import EvgProject -from shrub.v3.shrub_service import ConfigDumper from shrub.v3.evg_task import EvgTaskRef +from shrub.v3.shrub_service import ConfigDumper from typing_extensions import get_args, get_origin, get_type_hints T = TypeVar('T') @@ -39,8 +38,8 @@ def bash_exec( **kwargs, ): ret = subprocess_exec( - binary="bash", - args=["-c", dedent(script)], + binary='bash', + args=['-c', dedent(script)], include_expansions_in_env=list(include_expansions_in_env) if include_expansions_in_env else None, working_dir=working_dir, command_type=command_type, @@ -49,7 +48,7 @@ def bash_exec( ) if retry_on_failure is not None: - ret.params |= {"retry_on_failure": retry_on_failure} + ret.params |= {'retry_on_failure': retry_on_failure} return ret @@ -75,7 +74,7 @@ def all_components(): # Helper function to print component name for diagnostic purposes. def component_name(component): component_prefix = 'config_generator.components.' 
- res = component.__name__[len(component_prefix):] + res = component.__name__[len(component_prefix) :] return res @@ -118,13 +117,9 @@ def represent_mapping(self, tag, mapping, flow_style=False): 'args', ] - ordered = { - field: mapping.pop(field) for field in before if field in mapping - } + ordered = {field: mapping.pop(field) for field in before if field in mapping} - suffix = { - field: mapping.pop(field) for field in after if field in mapping - } + suffix = {field: mapping.pop(field) for field in after if field in mapping} ordered.update(sorted(mapping.items())) ordered.update(suffix) @@ -177,6 +172,4 @@ def all_possible(typ: Type[T]) -> Iterable[T]: # Reconstruct as a NamedTuple: yield typ(**items) # type: ignore else: - raise TypeError( - f'Do not know how to do "all_possible" of type {typ!r} ({origin=})' - ) + raise TypeError(f'Do not know how to do "all_possible" of type {typ!r} ({origin=})') diff --git a/.evergreen/config_generator/generate.py b/.evergreen/config_generator/generate.py index f32b79ef046..6b4d3cf7e52 100644 --- a/.evergreen/config_generator/generate.py +++ b/.evergreen/config_generator/generate.py @@ -4,16 +4,14 @@ import sys - from importlib import import_module - GENERATOR_NAMES = [ - "functions", - "tasks", - "task_groups", - "variants", - "legacy_config", + 'functions', + 'tasks', + 'task_groups', + 'variants', + 'legacy_config', ] @@ -23,11 +21,11 @@ def main(): assert sys.version_info.minor >= 10 for name in GENERATOR_NAMES: - m = import_module(f"config_generator.generators.{name}") - print(f"Running {name}.generate()...") + m = import_module(f'config_generator.generators.{name}') + print(f'Running {name}.generate()...') m.generate() - print(f"Running {name}.generate()... done.") + print(f'Running {name}.generate()... 
done.') -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py index b5bb4aa3e0f..c5f877586d1 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py @@ -18,22 +18,22 @@ from typing import Any, Iterable, Mapping, MutableMapping, MutableSequence, Sequence, Union Scalar = Union[str, bool, int, None, float] -"YAML simple schema scalar types" -ValueSequence = Sequence["Value"] -"Sequence of YAML simple values" -MutableValueArray = MutableSequence["Value"] -"A mutable sequence of JSON values" -ValueMapping = Mapping[Scalar, "Value"] -"A YAML mapping type (arbitrary scalars as keys)" -MutableValueMapping = MutableMapping[Scalar, "Value"] -"A mutable YAML mapping type" +'YAML simple schema scalar types' +ValueSequence = Sequence['Value'] +'Sequence of YAML simple values' +MutableValueArray = MutableSequence['Value'] +'A mutable sequence of JSON values' +ValueMapping = Mapping[Scalar, 'Value'] +'A YAML mapping type (arbitrary scalars as keys)' +MutableValueMapping = MutableMapping[Scalar, 'Value'] +'A mutable YAML mapping type' Value = Union[ValueSequence, ValueMapping, Scalar] -"Any YAML simple value" +'Any YAML simple value' MutableValue = Union[MutableValueMapping, MutableValueArray, Scalar] -"Any YAML simple value, which may be a mutable sequence or map" +'Any YAML simple value, which may be a mutable sequence or map' ValueOrderedDict = OD[Scalar, Value] -"An OrderedDict of YAML values" +'An OrderedDict of YAML values' import yaml @@ -43,10 +43,10 @@ class ConfigObject(object): @property def name(self) -> str: - return "UNSET" + return 'UNSET' def to_dict(self) -> Value: - return OD([("name", self.name)]) + return OD([('name', self.name)]) # We want legible YAML tasks: @@ -72,8 +72,8 @@ def 
__init__(self, *args: Value, **kwargs: Value): self.add_multi_representer(ConfigObject, type(self).represent_config_object) def represent_scalar(self, tag: str, value: Value, style: str | None = None) -> yaml.ScalarNode: - if isinstance(value, (str)) and "\n" in value: - style = "|" + if isinstance(value, (str)) and '\n' in value: + style = '|' return super().represent_scalar(tag, value, style) # type: ignore def represent_set(self, data: Iterable[Value]) -> yaml.MappingNode: @@ -92,7 +92,7 @@ def generate(config: Any, path: str): """Dump config to a file as YAML. config is a dict, preferably an OrderedDict. path is a file path. """ - f = open(path, "w+") + f = open(path, 'w+') f.write( """#################################### # Evergreen configuration diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py b/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py index 75d0ab0c229..99a05f0f064 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py @@ -18,33 +18,33 @@ from evergreen_config_generator import ConfigObject -from . import Value, MutableValueMapping, ValueMapping, ValueOrderedDict +from . 
import MutableValueMapping, Value, ValueMapping, ValueOrderedDict def func(func_name: str, **kwargs: Value) -> MutableValueMapping: - od: MutableValueMapping = OD([("func", func_name)]) + od: MutableValueMapping = OD([('func', func_name)]) if kwargs: - od["vars"] = OD(sorted(kwargs.items())) + od['vars'] = OD(sorted(kwargs.items())) return od def s3_put(remote_file: str, project_path: bool = True, **kwargs: Value) -> ValueMapping: if project_path: - remote_file = "${project}/" + remote_file + remote_file = '${project}/' + remote_file return ValueOrderedDict( [ - ("command", "s3.put"), + ('command', 's3.put'), ( - "params", + 'params', ValueOrderedDict( ( - ("aws_key", "${aws_key}"), - ("aws_secret", "${aws_secret}"), - ("remote_file", remote_file), - ("bucket", "mciuploads"), - ("permissions", "public-read"), + ('aws_key', '${aws_key}'), + ('aws_secret', '${aws_secret}'), + ('remote_file', remote_file), + ('bucket', 'mciuploads'), + ('permissions', 'public-read'), *kwargs.items(), ) ), @@ -54,7 +54,7 @@ def s3_put(remote_file: str, project_path: bool = True, **kwargs: Value) -> Valu def strip_lines(s: str) -> str: - return "\n".join(line for line in s.split("\n") if line.strip()) + return '\n'.join(line for line in s.split('\n') if line.strip()) def shell_exec( @@ -70,50 +70,50 @@ def shell_exec( redirect_standard_error_to_output: bool = False, include_expansions_in_env: Iterable[str] = (), ) -> ValueMapping: - dedented = "" + dedented = '' if errexit: - dedented += "set -o errexit\n" + dedented += 'set -o errexit\n' if xtrace: - dedented += "set -o xtrace\n" + dedented += 'set -o xtrace\n' dedented += dedent(strip_lines(script)) - command = ValueOrderedDict([("command", "shell.exec")]) + command = ValueOrderedDict([('command', 'shell.exec')]) if test: - command["type"] = "test" + command['type'] = 'test' - command["params"] = OD() + command['params'] = OD() if silent: - command["params"]["silent"] = True + command['params']['silent'] = True if working_dir is not 
None: - command["params"]["working_dir"] = working_dir + command['params']['working_dir'] = working_dir if continue_on_err: - command["params"]["continue_on_err"] = True + command['params']['continue_on_err'] = True if background: - command["params"]["background"] = True + command['params']['background'] = True if add_expansions_to_env: - command["params"]["add_expansions_to_env"] = True + command['params']['add_expansions_to_env'] = True if redirect_standard_error_to_output: - command["params"]["redirect_standard_error_to_output"] = True + command['params']['redirect_standard_error_to_output'] = True if include_expansions_in_env: - command["params"]["include_expansions_in_env"] = list(include_expansions_in_env) + command['params']['include_expansions_in_env'] = list(include_expansions_in_env) - command["params"]["shell"] = "bash" - command["params"]["script"] = dedented + command['params']['shell'] = 'bash' + command['params']['script'] = dedented return command def targz_pack(target: str, source_dir: str, *include: str) -> ValueMapping: return OD( [ - ("command", "archive.targz_pack"), - ("params", OD([("target", target), ("source_dir", source_dir), ("include", list(include))])), + ('command', 'archive.targz_pack'), + ('params', OD([('target', target), ('source_dir', source_dir), ('include', list(include))])), ] ) diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py b/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py index 47263e3b83b..cd96e745edd 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py @@ -13,6 +13,7 @@ # limitations under the License. from typing import MutableMapping + from evergreen_config_generator import ConfigObject from . 
import Value, ValueSequence @@ -42,16 +43,16 @@ def to_dict(self) -> Value: # See possible TaskGroup attributes from the Evergreen wiki: # https://github.com/evergreen-ci/evergreen/wiki/Project-Configuration-Files#task-groups attrs = [ - "setup_group", - "teardown_group", - "setup_task", - "teardown_task", - "max_hosts", - "timeout", - "setup_group_can_fail_task", - "setup_group_timeout_secs", - "share_processes", - "tasks", + 'setup_group', + 'teardown_group', + 'setup_task', + 'teardown_task', + 'max_hosts', + 'timeout', + 'setup_group_can_fail_task', + 'setup_group_timeout_secs', + 'share_processes', + 'tasks', ] for i in attrs: diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py b/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py index 3ea8072591d..01824659772 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py @@ -12,17 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import OrderedDict as OD import copy -from itertools import chain, product import itertools +from collections import OrderedDict as OD +from itertools import chain, product from typing import ClassVar, Iterable, Literal, Mapping, MutableMapping, Sequence, Union from evergreen_config_generator import ConfigObject from evergreen_config_generator.functions import func -from . import Value, MutableValueMapping, ValueSequence - +from . 
import MutableValueMapping, Value, ValueSequence DependencySpec = Union[str, Mapping[str, Value]] @@ -44,7 +43,7 @@ def __init__( self._depends_on = list(map(self._normal_dep, depends_on)) if exec_timeout_secs is not None: - self.options["exec_timeout_secs"] = exec_timeout_secs + self.options['exec_timeout_secs'] = exec_timeout_secs @property def dependencies(self) -> Sequence[Mapping[str, Value]]: @@ -54,7 +53,7 @@ def dependencies(self) -> Sequence[Mapping[str, Value]]: def _normal_dep(self, spec: DependencySpec) -> Mapping[str, Value]: if isinstance(spec, str): - return OD([("name", spec)]) + return OD([('name', spec)]) return spec @property @@ -83,7 +82,7 @@ def additional_tags(self) -> Iterable[str]: def add_dependency(self, dependency: DependencySpec): if isinstance(dependency, str): - dependency = OD([("name", dependency)]) + dependency = OD([('name', dependency)]) self._depends_on.append(dependency) @@ -91,15 +90,15 @@ def to_dict(self): task: MutableValueMapping = super().to_dict() # type: ignore assert isinstance(task, MutableMapping) if self.tags: - task["tags"] = list(self.tags) + task['tags'] = list(self.tags) task.update(self.options) deps: Sequence[MutableValueMapping] = list(self.dependencies) # type: ignore if deps: if len(deps) == 1: - task["depends_on"] = OD(deps[0]) + task['depends_on'] = OD(deps[0]) else: - task["depends_on"] = copy.deepcopy(deps) - task["commands"] = list( + task['depends_on'] = copy.deepcopy(deps) + task['commands'] = list( itertools.chain( self.pre_commands(), self.main_commands(), @@ -149,7 +148,7 @@ def both_or_neither(rule0: bool, rule1: bool) -> None: class SettingsAccess: - def __init__(self, inst: "MatrixTask") -> None: + def __init__(self, inst: 'MatrixTask') -> None: self._task = inst def __getattr__(self, __setting: str) -> str | bool: @@ -170,18 +169,18 @@ def display(self, axis_name: str) -> str: value = self.setting_value(axis_name) if value is False: # E.g., if self.auth is False, return 'noauth'. 
- return f"no{axis_name}" + return f'no{axis_name}' elif value is True: return axis_name else: return value - def on_off(self, key: str, val: str) -> Literal["on", "off"]: - return "on" if self.setting_value(key) == val else "off" + def on_off(self, key: str, val: str) -> Literal['on', 'off']: + return 'on' if self.setting_value(key) == val else 'off' @property def name(self) -> str: - return "-".join(self.name_parts()) + return '-'.join(self.name_parts()) def name_parts(self) -> Iterable[str]: raise NotImplementedError @@ -191,24 +190,24 @@ def settings(self) -> SettingsAccess: return SettingsAccess(self) def setting_value(self, axis: str) -> str | bool: - assert ( - axis in type(self).axes.keys() - ), f'Attempted to inspect setting "{axis}", which is not defined for this task type' + assert axis in type(self).axes.keys(), ( + f'Attempted to inspect setting "{axis}", which is not defined for this task type' + ) return self._settings[axis] def setting_eq(self, axis: str, val: str | bool) -> bool: current = self.setting_value(axis) options = type(self).axes[axis] - assert ( - val in options - ), f'Looking for value "{val}" on setting "{axis}", but that is not a supported option (Expects one of {options})' + assert val in options, ( + f'Looking for value "{val}" on setting "{axis}", but that is not a supported option (Expects one of {options})' + ) return current == val def is_valid_combination(self) -> bool: try: return self.do_is_valid_combination() except Prohibited: - print(f"Ignoring invalid combination {self.name!r}") + print(f'Ignoring invalid combination {self.name!r}') return False def do_is_valid_combination(self) -> bool: diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py b/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py index 3d328b95dcb..722919b0b6a 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py +++ 
b/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py @@ -13,6 +13,7 @@ # limitations under the License. from typing import Iterable, Mapping + from evergreen_config_generator import ConfigObject from . import ValueMapping @@ -48,7 +49,7 @@ def name(self): def to_dict(self): v = super(Variant, self).to_dict() - for i in "display_name", "expansions", "run_on", "tasks", "patchable", "batchtime", "tags", "display_tasks": + for i in 'display_name', 'expansions', 'run_on', 'tasks', 'patchable', 'batchtime', 'tags', 'display_tasks': attr = getattr(self, i) # Allow `False`, but ignore empty lists and dicts. diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py b/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py index 1841636e15d..5634cb5f855 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py @@ -13,6 +13,7 @@ # limitations under the License. 
from typing import Iterable + from evergreen_config_generator.functions import shell_exec @@ -30,7 +31,7 @@ def shell_mongoc( ): return shell_exec( script, - working_dir="mongoc", + working_dir='mongoc', test=test, errexit=errexit, xtrace=xtrace, diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py index 2131e616f4b..2929cc261df 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py @@ -14,211 +14,297 @@ from collections import OrderedDict as OD -from evergreen_config_generator.functions import ( - Function, s3_put, shell_exec) +from evergreen_config_generator.functions import Function, s3_put, shell_exec + from evergreen_config_lib import shell_mongoc build_path = '${build_variant}/${revision}/${version_id}/${build_id}' -all_functions = OD([ - ('install ssl', Function( - shell_mongoc(r''' - .evergreen/scripts/install-ssl.sh - ''', test=False, add_expansions_to_env=True), - )), - ('upload coverage', Function( - shell_mongoc(r''' - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp coverage s3://mciuploads/${project}/%s/coverage/ --recursive --acl public-read --region us-east-1 - ''' % (build_path,), test=False, silent=True), - s3_put(build_path + '/coverage/index.html', aws_key='${aws_key}', - aws_secret='${aws_secret}', - local_file='mongoc/coverage/index.html', bucket='mciuploads', - permissions='public-read', content_type='text/html', - display_name='Coverage Report'), - )), - ('upload scan artifacts', Function( - shell_mongoc(r''' - if find scan -name \*.html | grep -q html; then - (cd scan && find . -name index.html -exec echo "
  • {}
  • " \;) >> scan.html - else - echo "No issues found" > scan.html - fi - '''), - shell_mongoc(r''' - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp scan s3://mciuploads/${project}/%s/scan/ --recursive --acl public-read --region us-east-1 - ''' % (build_path,), test=False, silent=True), - s3_put(build_path + '/scan/index.html', aws_key='${aws_key}', - aws_secret='${aws_secret}', local_file='mongoc/scan.html', - bucket='mciuploads', permissions='public-read', - content_type='text/html', display_name='Scan Build Report'), - )), - # Use "silent=True" to hide output since errors may contain credentials. - ('run auth tests', Function( - shell_mongoc(r''' - .evergreen/scripts/run-auth-tests.sh - ''', add_expansions_to_env=True), - )), - ('link sample program', Function( - shell_mongoc(r''' - # Compile a program that links dynamically or statically to libmongoc, - # using variables from pkg-config or CMake's find_package command. - export BUILD_SAMPLE_WITH_CMAKE=${BUILD_SAMPLE_WITH_CMAKE} - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - LINK_STATIC= .evergreen/scripts/link-sample-program.sh - LINK_STATIC=1 .evergreen/scripts/link-sample-program.sh - ''', - include_expansions_in_env=['distro_id']), - )), - ('link sample program bson', Function( - shell_mongoc(r''' - # Compile a program that links dynamically or statically to libbson, - # using variables from pkg-config or from CMake's find_package command. 
- BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh - ''', - include_expansions_in_env=['distro_id']), - )), - ('link sample program MSVC', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically or statically to it, using variables from CMake's - # find_package command. - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - . .evergreen/scripts/use-tools.sh paths - . .evergreen/scripts/find-cmake-latest.sh - export CMAKE="$(native-path "$(find_cmake_latest)")" - LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd - LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd - ''', - include_expansions_in_env=['distro_id']), - )), - ('link sample program mingw', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically to it, using variables from pkg-config.exe. - . .evergreen/scripts/use-tools.sh paths - . .evergreen/scripts/find-cmake-latest.sh - export CMAKE="$(native-path "$(find_cmake_latest)")" - cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw.cmd - ''', - include_expansions_in_env=['distro_id']), - )), - ('link sample program MSVC bson', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically or statically to it, using variables from CMake's - # find_package command. - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - . .evergreen/scripts/use-tools.sh paths - . 
.evergreen/scripts/find-cmake-latest.sh - export CMAKE="$(native-path "$(find_cmake_latest)")" - LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd - LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd - ''', - include_expansions_in_env=['distro_id']), - )), - ('link sample program mingw bson', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically to it, using variables from pkg-config.exe. - . .evergreen/scripts/use-tools.sh paths - . .evergreen/scripts/find-cmake-latest.sh - export CMAKE="$(native-path "$(find_cmake_latest)")" - cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd - '''), - )), - ('update codecov.io', Function( - shell_mongoc(r''' - # Note: coverage is currently only enabled on the ubuntu1804 distro. - # This script does not support MacOS, Windows, or non-x86_64 distros. - # Update accordingly if code coverage is expanded to other distros. - curl -Os https://uploader.codecov.io/latest/linux/codecov - chmod +x codecov - # -Z: Exit with a non-zero value if error. - # -g: Run with gcov support. - # -t: Codecov upload token. - # perl: filter verbose "Found" list and "Processing" messages. - ./codecov -Zgt "${codecov_token}" | perl -lne 'print if not m|^.*\.gcov(\.\.\.)?$|' - ''', test=False), - )), - ('compile coverage', Function( - shell_mongoc(r''' - COVERAGE=ON .evergreen/scripts/compile.sh - ''', add_expansions_to_env=True), - )), - ('build mongohouse', Function( - # Assume role to get AWS secrets. 
- { - "command": "ec2.assume_role", - "params": { - "role_arn": "${aws_test_secrets_role}" - } - }, - - shell_exec(r''' - cd drivers-evergreen-tools - export DRIVERS_TOOLS=$(pwd) - .evergreen/atlas_data_lake/pull-mongohouse-image.sh - ''', include_expansions_in_env=[ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ]), - )), - ('run mongohouse', Function( - shell_exec(r''' - cd drivers-evergreen-tools - export DRIVERS_TOOLS=$(pwd) - .evergreen/atlas_data_lake/run-mongohouse-image.sh - '''), - )), - ('test mongohouse', Function( - shell_mongoc(r''' - echo "Waiting for mongohouse to start..." - wait_for_mongohouse() { - for _ in $(seq 300); do - # Exit code 7: "Failed to connect to host". - if curl -s localhost:$1; (("$?" != 7)); then - return 0 +all_functions = OD( + [ + ( + 'install ssl', + Function( + shell_mongoc( + '.evergreen/scripts/install-ssl.sh', + test=False, + add_expansions_to_env=True, + ), + ), + ), + ( + 'upload coverage', + Function( + shell_mongoc( + r""" + export AWS_ACCESS_KEY_ID=${aws_key} + export AWS_SECRET_ACCESS_KEY=${aws_secret} + aws s3 cp coverage s3://mciuploads/${project}/%s/coverage/ --recursive --acl public-read --region us-east-1 + """ + % (build_path,), + test=False, + silent=True, + ), + s3_put( + build_path + '/coverage/index.html', + aws_key='${aws_key}', + aws_secret='${aws_secret}', + local_file='mongoc/coverage/index.html', + bucket='mciuploads', + permissions='public-read', + content_type='text/html', + display_name='Coverage Report', + ), + ), + ), + ( + 'upload scan artifacts', + Function( + shell_mongoc(r""" + if find scan -name \*.html | grep -q html; then + (cd scan && find . -name index.html -exec echo "
  • {}
  • " \;) >> scan.html else - sleep 1 + echo "No issues found" > scan.html fi - done - echo "Could not detect mongohouse on port $1" 1>&2 - return 1 - } - wait_for_mongohouse 27017 || exit - echo "Waiting for mongohouse to start... done." - pgrep -a "mongohouse" - export RUN_MONGOHOUSE_TESTS=ON - ./cmake-build/src/libmongoc/test-libmongoc --no-fork -l /mongohouse/* -d --skip-tests .evergreen/etc/skip-tests.txt - '''), - )), - ('run aws tests', Function( - # Assume role to get AWS secrets. - { - "command": "ec2.assume_role", - "params": { - "role_arn": "${aws_test_secrets_role}" - } - }, - - shell_mongoc(r''' - pushd ../drivers-evergreen-tools/.evergreen/auth_aws - ./setup_secrets.sh drivers/aws_auth - popd # ../drivers-evergreen-tools/.evergreen/auth_aws - ''', include_expansions_in_env=["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"]), - - shell_mongoc(r''' - pushd ../drivers-evergreen-tools/.evergreen/auth_aws - . ./activate-authawsvenv.sh - popd # ../drivers-evergreen-tools/.evergreen/auth_aws - .evergreen/scripts/run-aws-tests.sh - ''', include_expansions_in_env=["TESTCASE"]) - )), -]) + """), + shell_mongoc( + r""" + export AWS_ACCESS_KEY_ID=${aws_key} + export AWS_SECRET_ACCESS_KEY=${aws_secret} + aws s3 cp scan s3://mciuploads/${project}/%s/scan/ --recursive --acl public-read --region us-east-1 + """ + % (build_path,), + test=False, + silent=True, + ), + s3_put( + build_path + '/scan/index.html', + aws_key='${aws_key}', + aws_secret='${aws_secret}', + local_file='mongoc/scan.html', + bucket='mciuploads', + permissions='public-read', + content_type='text/html', + display_name='Scan Build Report', + ), + ), + ), + # Use "silent=True" to hide output since errors may contain credentials. 
+ ( + 'run auth tests', + Function( + shell_mongoc( + r""" + .evergreen/scripts/run-auth-tests.sh + """, + add_expansions_to_env=True, + ), + ), + ), + ( + 'link sample program', + Function( + shell_mongoc( + r""" + # Compile a program that links dynamically or statically to libmongoc, + # using variables from pkg-config or CMake's find_package command. + export BUILD_SAMPLE_WITH_CMAKE=${BUILD_SAMPLE_WITH_CMAKE} + export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + LINK_STATIC= .evergreen/scripts/link-sample-program.sh + LINK_STATIC=1 .evergreen/scripts/link-sample-program.sh + """, + include_expansions_in_env=['distro_id'], + ), + ), + ), + ( + 'link sample program bson', + Function( + shell_mongoc( + r""" + # Compile a program that links dynamically or statically to libbson, + # using variables from pkg-config or from CMake's find_package command. + BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh + """, + include_expansions_in_env=['distro_id'], + ), + ), + ), + ( + 'link sample program MSVC', + Function( + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically or statically to it, using variables from CMake's + # find_package command. + export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + . .evergreen/scripts/use-tools.sh paths + . 
.evergreen/scripts/find-cmake-latest.sh + export CMAKE="$(native-path "$(find_cmake_latest)")" + LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd + LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd + """, + include_expansions_in_env=['distro_id'], + ), + ), + ), + ( + 'link sample program mingw', + Function( + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically to it, using variables from pkg-config.exe. + . .evergreen/scripts/use-tools.sh paths + . .evergreen/scripts/find-cmake-latest.sh + export CMAKE="$(native-path "$(find_cmake_latest)")" + cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw.cmd + """, + include_expansions_in_env=['distro_id'], + ), + ), + ), + ( + 'link sample program MSVC bson', + Function( + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically or statically to it, using variables from CMake's + # find_package command. + export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + . .evergreen/scripts/use-tools.sh paths + . .evergreen/scripts/find-cmake-latest.sh + export CMAKE="$(native-path "$(find_cmake_latest)")" + LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd + LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd + """, + include_expansions_in_env=['distro_id'], + ), + ), + ), + ( + 'link sample program mingw bson', + Function( + shell_mongoc(r""" + # Build libmongoc with CMake and compile a program that links + # dynamically to it, using variables from pkg-config.exe. + . .evergreen/scripts/use-tools.sh paths + . 
.evergreen/scripts/find-cmake-latest.sh + export CMAKE="$(native-path "$(find_cmake_latest)")" + cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd + """), + ), + ), + ( + 'update codecov.io', + Function( + shell_mongoc( + r""" + # Note: coverage is currently only enabled on the ubuntu1804 distro. + # This script does not support MacOS, Windows, or non-x86_64 distros. + # Update accordingly if code coverage is expanded to other distros. + curl -Os https://uploader.codecov.io/latest/linux/codecov + chmod +x codecov + # -Z: Exit with a non-zero value if error. + # -g: Run with gcov support. + # -t: Codecov upload token. + # perl: filter verbose "Found" list and "Processing" messages. + ./codecov -Zgt "${codecov_token}" | perl -lne 'print if not m|^.*\.gcov(\.\.\.)?$|' + """, + test=False, + ), + ), + ), + ( + 'compile coverage', + Function( + shell_mongoc( + 'COVERAGE=ON .evergreen/scripts/compile.sh', + add_expansions_to_env=True, + ), + ), + ), + ( + 'build mongohouse', + Function( + # Assume role to get AWS secrets. + {'command': 'ec2.assume_role', 'params': {'role_arn': '${aws_test_secrets_role}'}}, + shell_exec( + r""" + cd drivers-evergreen-tools + export DRIVERS_TOOLS=$(pwd) + .evergreen/atlas_data_lake/pull-mongohouse-image.sh + """, + include_expansions_in_env=['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN'], + ), + ), + ), + ( + 'run mongohouse', + Function( + shell_exec(r""" + cd drivers-evergreen-tools + export DRIVERS_TOOLS=$(pwd) + .evergreen/atlas_data_lake/run-mongohouse-image.sh + """), + ), + ), + ( + 'test mongohouse', + Function( + shell_mongoc(r""" + echo "Waiting for mongohouse to start..." + wait_for_mongohouse() { + for _ in $(seq 300); do + # Exit code 7: "Failed to connect to host". + if curl -s localhost:$1; (("$?" 
!= 7)); then + return 0 + else + sleep 1 + fi + done + echo "Could not detect mongohouse on port $1" 1>&2 + return 1 + } + wait_for_mongohouse 27017 || exit + echo "Waiting for mongohouse to start... done." + pgrep -a "mongohouse" + export RUN_MONGOHOUSE_TESTS=ON + ./cmake-build/src/libmongoc/test-libmongoc --no-fork -l /mongohouse/* -d --skip-tests .evergreen/etc/skip-tests.txt + """), + ), + ), + ( + 'run aws tests', + Function( + # Assume role to get AWS secrets. + {'command': 'ec2.assume_role', 'params': {'role_arn': '${aws_test_secrets_role}'}}, + shell_mongoc( + r""" + pushd ../drivers-evergreen-tools/.evergreen/auth_aws + ./setup_secrets.sh drivers/aws_auth + popd # ../drivers-evergreen-tools/.evergreen/auth_aws + """, + include_expansions_in_env=['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN'], + ), + shell_mongoc( + r""" + pushd ../drivers-evergreen-tools/.evergreen/auth_aws + . ./activate-authawsvenv.sh + popd # ../drivers-evergreen-tools/.evergreen/auth_aws + .evergreen/scripts/run-aws-tests.sh + """, + include_expansions_in_env=['TESTCASE'], + ), + ), + ), + ] +) diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py b/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py index 50cc79a4472..f0b8473b2a2 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py @@ -13,6 +13,7 @@ # limitations under the License. 
from typing import Sequence + from evergreen_config_generator.taskgroups import TaskGroup all_task_groups: Sequence[TaskGroup] = [] diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py index 2ed9afaded6..cb9ad5bfc0b 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py @@ -16,24 +16,24 @@ from itertools import chain from typing import ClassVar, Iterable, Literal, Mapping, MutableMapping, MutableSequence, Optional, Sequence -from evergreen_config_generator import Value, Scalar +from evergreen_config_generator import Scalar, Value from evergreen_config_generator.functions import func, s3_put from evergreen_config_generator.tasks import ( - both_or_neither, + DependencySpec, MatrixTask, NamedTask, + Task, + both_or_neither, prohibit, require, - Task, - DependencySpec, ) -from evergreen_config_lib import shell_mongoc from packaging.version import Version +from evergreen_config_lib import shell_mongoc -ToggleStr = Literal["OFF", "ON"] +ToggleStr = Literal['OFF', 'ON'] OptToggleStr = Optional[ToggleStr] -TopologyStr = Literal["server"] +TopologyStr = Literal['server'] class CompileTask(NamedTask): @@ -45,21 +45,21 @@ def __init__( self, task_name: str, tags: Iterable[str] = (), - config: str = "debug", - compression: str | None = "default", + config: str = 'debug', + compression: str | None = 'default', suffix_commands: Iterable[Value] = (), depends_on: Iterable[DependencySpec] = (), prefix_commands: Iterable[Value] = (), - sanitize: Iterable[Literal["undefined", "address", "thread"]] = (), + sanitize: Iterable[Literal['undefined', 'address', 'thread']] = (), *, CFLAGS: str | None = None, LDFLAGS: str | None = None, EXTRA_CONFIGURE_FLAGS: str | None = None, - SSL: Literal["WINDOWS", "DARWIN", "OPENSSL", "OPENSSL_STATIC", "OFF", None] = None, + SSL: Literal['WINDOWS', 'DARWIN', 'OPENSSL', 
'OPENSSL_STATIC', 'OFF', None] = None, ENABLE_SHM_COUNTERS: OptToggleStr = None, CHECK_LOG: OptToggleStr = None, TRACING: OptToggleStr = None, - SASL: Literal[None, "OFF", "AUTO", "CYRUS", "SSPI"] = None, + SASL: Literal[None, 'OFF', 'AUTO', 'CYRUS', 'SSPI'] = None, ENABLE_RDTSCP: OptToggleStr = None, SRV: OptToggleStr = None, ): @@ -71,38 +71,38 @@ def __init__( # Environment variables for .evergreen/scripts/compile.sh. self.compile_sh_opt: dict[str, str] = {} - if config != "debug": - assert config == "release" - self.compile_sh_opt["RELEASE"] = "ON" + if config != 'debug': + assert config == 'release' + self.compile_sh_opt['RELEASE'] = 'ON' if CFLAGS: - self.compile_sh_opt["CFLAGS"] = CFLAGS + self.compile_sh_opt['CFLAGS'] = CFLAGS if LDFLAGS: - self.compile_sh_opt["LDFLAGS"] = LDFLAGS + self.compile_sh_opt['LDFLAGS'] = LDFLAGS if EXTRA_CONFIGURE_FLAGS: - self.compile_sh_opt["EXTRA_CONFIGURE_FLAGS"] = EXTRA_CONFIGURE_FLAGS + self.compile_sh_opt['EXTRA_CONFIGURE_FLAGS'] = EXTRA_CONFIGURE_FLAGS if SSL: - self.compile_sh_opt["SSL"] = SSL + self.compile_sh_opt['SSL'] = SSL if ENABLE_SHM_COUNTERS: - self.compile_sh_opt["ENABLE_SHM_COUNTERS"] = ENABLE_SHM_COUNTERS + self.compile_sh_opt['ENABLE_SHM_COUNTERS'] = ENABLE_SHM_COUNTERS if CHECK_LOG: - self.compile_sh_opt["CHECK_LOG"] = CHECK_LOG + self.compile_sh_opt['CHECK_LOG'] = CHECK_LOG if TRACING: - self.compile_sh_opt["TRACING"] = TRACING + self.compile_sh_opt['TRACING'] = TRACING if SASL: - self.compile_sh_opt["SASL"] = SASL + self.compile_sh_opt['SASL'] = SASL if ENABLE_RDTSCP: - self.compile_sh_opt["ENABLE_RDTSCP"] = ENABLE_RDTSCP + self.compile_sh_opt['ENABLE_RDTSCP'] = ENABLE_RDTSCP if SRV: - self.compile_sh_opt["SRV"] = SRV + self.compile_sh_opt['SRV'] = SRV - if compression != "default": - self.compile_sh_opt["SNAPPY"] = "ON" if compression in ("all", "snappy") else "OFF" - self.compile_sh_opt["ZLIB"] = "BUNDLED" if compression in ("all", "zlib") else "OFF" - self.compile_sh_opt["ZSTD"] = "ON" if compression 
in ("all", "zstd") else "OFF" + if compression != 'default': + self.compile_sh_opt['SNAPPY'] = 'ON' if compression in ('all', 'snappy') else 'OFF' + self.compile_sh_opt['ZLIB'] = 'BUNDLED' if compression in ('all', 'zlib') else 'OFF' + self.compile_sh_opt['ZSTD'] = 'ON' if compression in ('all', 'zstd') else 'OFF' if sanitize: - self.compile_sh_opt["SANITIZE"] = ",".join(sanitize) + self.compile_sh_opt['SANITIZE'] = ','.join(sanitize) self.compile_sh_opt.update(type(self).cls_compile_sh_env) @@ -111,20 +111,20 @@ def additional_script_env(self) -> Mapping[str, str]: def to_dict(self): task = super(CompileTask, self).to_dict() - commands = task["commands"] + commands = task['commands'] assert isinstance(commands, MutableSequence), task commands.extend(self.prefix_commands) - script = "env" + script = 'env' for opt, value in sorted(self.compile_sh_opt.items()): script += ' %s="%s"' % (opt, value) - script += " .evergreen/scripts/compile.sh" + script += ' .evergreen/scripts/compile.sh' commands.append(func('find-cmake-latest')) commands.append(shell_mongoc(script, add_expansions_to_env=True)) - commands.append(func("upload-build")) + commands.append(func('upload-build')) commands.extend(self.suffix_commands) return task @@ -134,38 +134,38 @@ def additional_tags(self) -> Iterable[str]: class SpecialTask(CompileTask): - cls_tags: ClassVar[Sequence[str]] = ["special"] + cls_tags: ClassVar[Sequence[str]] = ['special'] class CompileWithClientSideEncryption(CompileTask): cls_compile_sh_env: ClassVar[Mapping[str, str]] = dict( # Compiling with ClientSideEncryption support requires linking against the library libmongocrypt. 
- COMPILE_LIBMONGOCRYPT="ON", - EXTRA_CONFIGURE_FLAGS="-DENABLE_PIC=ON", + COMPILE_LIBMONGOCRYPT='ON', + EXTRA_CONFIGURE_FLAGS='-DENABLE_PIC=ON', ) - cls_tags: ClassVar[Sequence[str]] = "client-side-encryption", "special" + cls_tags: ClassVar[Sequence[str]] = 'client-side-encryption', 'special' class CompileWithClientSideEncryptionAsan(CompileTask): cls_compile_sh_env: ClassVar[Mapping[str, str]] = dict( - CFLAGS="-fno-omit-frame-pointer", - COMPILE_LIBMONGOCRYPT="ON", - CHECK_LOG="ON", - PATH="/usr/lib/llvm-3.8/bin:$PATH", + CFLAGS='-fno-omit-frame-pointer', + COMPILE_LIBMONGOCRYPT='ON', + CHECK_LOG='ON', + PATH='/usr/lib/llvm-3.8/bin:$PATH', ) - cls_tags: ClassVar[Sequence[str]] = ["client-side-encryption"] - cls_sanitize: ClassVar[Sequence[str]] = ["address"] + cls_tags: ClassVar[Sequence[str]] = ['client-side-encryption'] + cls_sanitize: ClassVar[Sequence[str]] = ['address'] class LinkTask(NamedTask): def __init__( - self, task_name: str, suffix_commands: Iterable[Value], orchestration: Literal[True, False, "ssl"] = True + self, task_name: str, suffix_commands: Iterable[Value], orchestration: Literal[True, False, 'ssl'] = True ): - if orchestration == "ssl": + if orchestration == 'ssl': # Actual value of SSL does not matter here so long as it is not 'nossl'. 
- bootstrap_commands = [func("fetch-det"), func("bootstrap-mongo-orchestration", SSL="openssl")] + bootstrap_commands = [func('fetch-det'), func('bootstrap-mongo-orchestration', SSL='openssl')] elif orchestration: - bootstrap_commands = [func("fetch-det"), func("bootstrap-mongo-orchestration")] + bootstrap_commands = [func('fetch-det'), func('bootstrap-mongo-orchestration')] else: bootstrap_commands = [] @@ -177,103 +177,113 @@ def __init__( all_tasks = [ CompileTask( - "hardened-compile", - tags=["hardened"], + 'hardened-compile', + tags=['hardened'], compression=None, - CFLAGS="-fno-strict-overflow -D_FORTIFY_SOURCE=2 -fstack-protector-all -fPIE -O", - LDFLAGS="-pie -Wl,-z,relro -Wl,-z,now", + CFLAGS='-fno-strict-overflow -D_FORTIFY_SOURCE=2 -fstack-protector-all -fPIE -O', + LDFLAGS='-pie -Wl,-z,relro -Wl,-z,now', ), - CompileTask("debug-compile-compression-zlib", tags=["zlib", "compression"], compression="zlib"), - CompileTask("debug-compile-compression-snappy", tags=["snappy", "compression"], compression="snappy"), - CompileTask("debug-compile-compression-zstd", tags=["zstd", "compression"], compression="zstd"), - CompileTask("debug-compile-nosasl-nossl", tags=["debug-compile", "nosasl", "nossl"], SSL="OFF"), - CompileTask("debug-compile-lto", CFLAGS="-flto"), - CompileTask("debug-compile-lto-thin", CFLAGS="-flto=thin"), - CompileTask("debug-compile-no-counters", tags=["debug-compile", "no-counters"], ENABLE_SHM_COUNTERS="OFF"), - CompileTask("compile-tracing", TRACING="ON", CFLAGS="-Werror -Wno-cast-align"), - CompileTask("release-compile", config="release"), - CompileTask("debug-compile-nosasl-openssl", tags=["debug-compile", "nosasl", "openssl"], SSL="OPENSSL"), - CompileTask("debug-compile-nosasl-darwinssl", tags=["debug-compile", "nosasl", "darwinssl"], SSL="DARWIN"), - CompileTask("debug-compile-nosasl-winssl", tags=["debug-compile", "nosasl", "winssl"], SSL="WINDOWS"), - CompileTask("debug-compile-sasl-openssl", tags=["debug-compile", "sasl", 
"openssl"], SASL="AUTO", SSL="OPENSSL"), - CompileTask("debug-compile-sasl-darwinssl", tags=["debug-compile", "sasl", "darwinssl"], SASL="AUTO", SSL="DARWIN"), - CompileTask("debug-compile-rdtscp", ENABLE_RDTSCP="ON"), - CompileTask("debug-compile-sspi-winssl", tags=["debug-compile", "sspi", "winssl"], SASL="SSPI", SSL="WINDOWS"), - CompileTask("debug-compile-nosrv", tags=["debug-compile"], SRV="OFF"), - LinkTask("link-with-cmake", suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1)]), + CompileTask('debug-compile-compression-zlib', tags=['zlib', 'compression'], compression='zlib'), + CompileTask('debug-compile-compression-snappy', tags=['snappy', 'compression'], compression='snappy'), + CompileTask('debug-compile-compression-zstd', tags=['zstd', 'compression'], compression='zstd'), + CompileTask('debug-compile-nosasl-nossl', tags=['debug-compile', 'nosasl', 'nossl'], SSL='OFF'), + CompileTask('debug-compile-lto', CFLAGS='-flto'), + CompileTask('debug-compile-lto-thin', CFLAGS='-flto=thin'), + CompileTask('debug-compile-no-counters', tags=['debug-compile', 'no-counters'], ENABLE_SHM_COUNTERS='OFF'), + CompileTask('compile-tracing', TRACING='ON', CFLAGS='-Werror -Wno-cast-align'), + CompileTask('release-compile', config='release'), + CompileTask('debug-compile-nosasl-openssl', tags=['debug-compile', 'nosasl', 'openssl'], SSL='OPENSSL'), + CompileTask('debug-compile-nosasl-darwinssl', tags=['debug-compile', 'nosasl', 'darwinssl'], SSL='DARWIN'), + CompileTask('debug-compile-nosasl-winssl', tags=['debug-compile', 'nosasl', 'winssl'], SSL='WINDOWS'), + CompileTask('debug-compile-sasl-openssl', tags=['debug-compile', 'sasl', 'openssl'], SASL='AUTO', SSL='OPENSSL'), + CompileTask('debug-compile-sasl-darwinssl', tags=['debug-compile', 'sasl', 'darwinssl'], SASL='AUTO', SSL='DARWIN'), + CompileTask('debug-compile-rdtscp', ENABLE_RDTSCP='ON'), + CompileTask('debug-compile-sspi-winssl', tags=['debug-compile', 'sspi', 'winssl'], SASL='SSPI', 
SSL='WINDOWS'), + CompileTask('debug-compile-nosrv', tags=['debug-compile'], SRV='OFF'), + LinkTask('link-with-cmake', suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1)]), LinkTask( - "link-with-cmake-ssl", - suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SSL=1)], + 'link-with-cmake-ssl', + suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SSL=1)], ), LinkTask( - "link-with-cmake-snappy", - suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SNAPPY="ON")], + 'link-with-cmake-snappy', + suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SNAPPY='ON')], ), - LinkTask("link-with-cmake-mac", suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1)]), - LinkTask("link-with-cmake-windows", suffix_commands=[func("link sample program MSVC")]), + LinkTask('link-with-cmake-mac', suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1)]), + LinkTask('link-with-cmake-windows', suffix_commands=[func('link sample program MSVC')]), LinkTask( - "link-with-cmake-windows-ssl", - suffix_commands=[func("link sample program MSVC", ENABLE_SSL=1)], - orchestration="ssl", + 'link-with-cmake-windows-ssl', + suffix_commands=[func('link sample program MSVC', ENABLE_SSL=1)], + orchestration='ssl', ), - LinkTask("link-with-cmake-windows-snappy", suffix_commands=[func("link sample program MSVC", ENABLE_SNAPPY="ON")]), - LinkTask("link-with-cmake-mingw", suffix_commands=[func("link sample program mingw")]), - LinkTask("link-with-pkg-config", suffix_commands=[func("link sample program")]), - LinkTask("link-with-pkg-config-mac", suffix_commands=[func("link sample program")]), - LinkTask("link-with-pkg-config-ssl", suffix_commands=[func("link sample program", ENABLE_SSL=1)]), - LinkTask("link-with-bson", suffix_commands=[func("link sample program bson")], orchestration=False), - LinkTask("link-with-bson-mac", 
suffix_commands=[func("link sample program bson")], orchestration=False), - LinkTask("link-with-bson-windows", suffix_commands=[func("link sample program MSVC bson")], orchestration=False), - LinkTask("link-with-bson-mingw", suffix_commands=[func("link sample program mingw bson")], orchestration=False), + LinkTask('link-with-cmake-windows-snappy', suffix_commands=[func('link sample program MSVC', ENABLE_SNAPPY='ON')]), + LinkTask('link-with-cmake-mingw', suffix_commands=[func('link sample program mingw')]), + LinkTask('link-with-pkg-config', suffix_commands=[func('link sample program')]), + LinkTask('link-with-pkg-config-mac', suffix_commands=[func('link sample program')]), + LinkTask('link-with-pkg-config-ssl', suffix_commands=[func('link sample program', ENABLE_SSL=1)]), + LinkTask('link-with-bson', suffix_commands=[func('link sample program bson')], orchestration=False), + LinkTask('link-with-bson-mac', suffix_commands=[func('link sample program bson')], orchestration=False), + LinkTask('link-with-bson-windows', suffix_commands=[func('link sample program MSVC bson')], orchestration=False), + LinkTask('link-with-bson-mingw', suffix_commands=[func('link sample program mingw bson')], orchestration=False), NamedTask( - "debian-package-build", + 'debian-package-build', commands=[ - shell_mongoc('export IS_PATCH="${is_patch}"\n' ".evergreen/scripts/debian_package_build.sh"), + shell_mongoc('export IS_PATCH="${is_patch}"\n.evergreen/scripts/debian_package_build.sh'), s3_put( - local_file="deb.tar.gz", - remote_file="${branch_name}/mongo-c-driver-debian-packages-${CURRENT_VERSION}.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='deb.tar.gz', + remote_file='${branch_name}/mongo-c-driver-debian-packages-${CURRENT_VERSION}.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="deb.tar.gz", - remote_file="${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages.tar.gz", - 
content_type="${content_type|application/x-gzip}", + local_file='deb.tar.gz', + remote_file='${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="deb-i386.tar.gz", - remote_file="${branch_name}/mongo-c-driver-debian-packages-i386-${CURRENT_VERSION}.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='deb-i386.tar.gz', + remote_file='${branch_name}/mongo-c-driver-debian-packages-i386-${CURRENT_VERSION}.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="deb-i386.tar.gz", - remote_file="${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages-i386.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='deb-i386.tar.gz', + remote_file='${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages-i386.tar.gz', + content_type='${content_type|application/x-gzip}', ), ], ), NamedTask( - "rpm-package-build", + 'rpm-package-build', commands=[ - shell_mongoc('export IS_PATCH="${is_patch}"\n' ".evergreen/scripts/check_rpm_spec.sh"), - shell_mongoc(".evergreen/scripts/build_snapshot_rpm.sh"), + shell_mongoc('export IS_PATCH="${is_patch}"\n.evergreen/scripts/check_rpm_spec.sh'), + shell_mongoc('.evergreen/scripts/build_snapshot_rpm.sh'), s3_put( - local_file="rpm.tar.gz", - remote_file="${branch_name}/mongo-c-driver-rpm-packages-${CURRENT_VERSION}.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='rpm.tar.gz', + remote_file='${branch_name}/mongo-c-driver-rpm-packages-${CURRENT_VERSION}.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="rpm.tar.gz", - remote_file="${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-rpm-packages.tar.gz", - content_type="${content_type|application/x-gzip}", + 
local_file='rpm.tar.gz', + remote_file='${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-rpm-packages.tar.gz', + content_type='${content_type|application/x-gzip}', + ), + shell_mongoc( + 'sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n' + 'export MOCK_TARGET_CONFIG=rocky+epel-9-aarch64\n' + '.evergreen/scripts/build_snapshot_rpm.sh' + ), + shell_mongoc( + 'sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n' + 'export MOCK_TARGET_CONFIG=rocky+epel-8-aarch64\n' + '.evergreen/scripts/build_snapshot_rpm.sh' ), - shell_mongoc("sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n" "export MOCK_TARGET_CONFIG=rocky+epel-9-aarch64\n" ".evergreen/scripts/build_snapshot_rpm.sh"), - shell_mongoc("sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n" "export MOCK_TARGET_CONFIG=rocky+epel-8-aarch64\n" ".evergreen/scripts/build_snapshot_rpm.sh"), ], ), - CompileTask("debug-compile-with-warnings", CFLAGS="-Werror -Wno-cast-align"), + CompileTask('debug-compile-with-warnings', CFLAGS='-Werror -Wno-cast-align'), NamedTask( - "install-libmongoc-after-libbson", - commands=[shell_mongoc(".evergreen/scripts/install-libmongoc-after-libbson.sh"),], + 'install-libmongoc-after-libbson', + commands=[ + shell_mongoc('.evergreen/scripts/install-libmongoc-after-libbson.sh'), + ], ), ] @@ -281,29 +291,29 @@ def __init__( class CoverageTask(MatrixTask): axes = OD( [ - ("version", ["latest"]), - ("topology", ["replica_set"]), - ("auth", [True]), - ("sasl", ["sasl"]), - ("ssl", ["openssl"]), - ("cse", [False, True]), + ('version', ['latest']), + ('topology', ['replica_set']), + ('auth', [True]), + ('sasl', ['sasl']), + ('ssl', ['openssl']), + ('cse', [False, True]), ] ) def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield "test-coverage" + yield 'test-coverage' yield str(self.settings.version) if self.cse: - yield "client-side-encryption" + yield 'client-side-encryption' def name_parts(self) -> Iterable[str]: - yield 
"test-coverage" - yield self.display("version") - yield self.display("topology").replace("_", "-") - yield from map(self.display, ("auth", "sasl", "ssl")) + yield 'test-coverage' + yield self.display('version') + yield self.display('topology').replace('_', '-') + yield from map(self.display, ('auth', 'sasl', 'ssl')) if self.settings.cse: - yield "cse" + yield 'cse' @property def cse(self) -> bool: @@ -312,38 +322,38 @@ def cse(self) -> bool: def post_commands(self) -> Iterable[Value]: if self.cse: yield func( - "compile coverage", - SASL="AUTO", - SSL="OPENSSL", - COMPILE_LIBMONGOCRYPT="ON", + 'compile coverage', + SASL='AUTO', + SSL='OPENSSL', + COMPILE_LIBMONGOCRYPT='ON', EXTRA_CONFIGURE_FLAGS='EXTRA_CONFIGURE_FLAGS="-DENABLE_PIC=ON"', ) else: - yield func("compile coverage", SASL="AUTO", SSL="OPENSSL") + yield func('compile coverage', SASL='AUTO', SSL='OPENSSL') - yield func("fetch-det") + yield func('fetch-det') yield func( - "bootstrap-mongo-orchestration", + 'bootstrap-mongo-orchestration', MONGODB_VERSION=self.settings.version, TOPOLOGY=self.settings.topology, - AUTH=self.display("auth"), - SSL=self.display("ssl"), + AUTH=self.display('auth'), + SSL=self.display('ssl'), ) - yield func("run-simple-http-server") - extra = {"COVERAGE": "ON"} + yield func('run-simple-http-server') + extra = {'COVERAGE': 'ON'} if self.cse: - extra["CLIENT_SIDE_ENCRYPTION"] = "ON" - yield func("run-mock-kms-servers") - yield func("run-tests", AUTH=self.display("auth"), SSL=self.display("ssl"), **extra) - yield func("upload coverage") - yield func("update codecov.io") + extra['CLIENT_SIDE_ENCRYPTION'] = 'ON' + yield func('run-mock-kms-servers') + yield func('run-tests', AUTH=self.display('auth'), SSL=self.display('ssl'), **extra) + yield func('upload coverage') + yield func('update codecov.io') def do_is_valid_combination(self) -> bool: # Limit coverage tests to test-coverage-latest-replica-set-auth-sasl-openssl (+ cse). 
- require(self.setting_eq("topology", "replica_set")) - require(self.setting_eq("sasl", "sasl")) - require(self.setting_eq("ssl", "openssl")) - require(self.setting_eq("version", "latest")) + require(self.setting_eq('topology', 'replica_set')) + require(self.setting_eq('sasl', 'sasl')) + require(self.setting_eq('ssl', 'openssl')) + require(self.setting_eq('version', 'latest')) require(self.settings.auth is True) if not self.cse: @@ -351,9 +361,9 @@ def do_is_valid_combination(self) -> bool: return True # CSE has extra requirements - if self.settings.version != "latest": + if self.settings.version != 'latest': # We only work with 4.2 or newer for CSE - require(Version(str(self.settings.version)) >= Version("4.2")) + require(Version(str(self.settings.version)) >= Version('4.2')) return True @@ -363,64 +373,64 @@ def do_is_valid_combination(self) -> bool: class DNSTask(MatrixTask): axes = OD( [ - ("auth", [False, True]), - ("loadbalanced", [False, True]), - ("ssl", ["openssl", "winssl", "darwinssl"]), + ('auth', [False, True]), + ('loadbalanced', [False, True]), + ('ssl', ['openssl', 'winssl', 'darwinssl']), ] ) - name_prefix = "test-dns" + name_prefix = 'test-dns' def additional_dependencies(self) -> Iterable[DependencySpec]: yield self.build_task_name @property def build_task_name(self) -> str: - sasl = "sspi" if self.settings.ssl == "winssl" else "sasl" + sasl = 'sspi' if self.settings.ssl == 'winssl' else 'sasl' return f'debug-compile-{sasl}-{self.display("ssl")}' def name_parts(self) -> Iterable[str]: - yield "test-dns" + yield 'test-dns' if self.settings.auth: - yield "auth" + yield 'auth' if self.settings.loadbalanced: - yield "loadbalanced" - yield self.display("ssl") + yield 'loadbalanced' + yield self.display('ssl') def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("fetch-det") + yield func('fetch-build', BUILD_NAME=self.build_task_name) + yield func('fetch-det') if 
self.settings.loadbalanced: orchestration = func( - "bootstrap-mongo-orchestration", - TOPOLOGY="sharded_cluster", - AUTH="auth" if self.settings.auth else "noauth", - SSL="ssl", - LOAD_BALANCER="on", + 'bootstrap-mongo-orchestration', + TOPOLOGY='sharded_cluster', + AUTH='auth' if self.settings.auth else 'noauth', + SSL='ssl', + LOAD_BALANCER='on', ) else: orchestration = func( - "bootstrap-mongo-orchestration", - TOPOLOGY="replica_set", - AUTH="auth" if self.settings.auth else "noauth", - SSL="ssl", + 'bootstrap-mongo-orchestration', + TOPOLOGY='replica_set', + AUTH='auth' if self.settings.auth else 'noauth', + SSL='ssl', ) yield orchestration - dns = "on" + dns = 'on' if self.settings.loadbalanced: - dns = "loadbalanced" - yield func("fetch-det") - yield func("start-load-balancer", MONGODB_URI="mongodb://localhost:27017,localhost:27018") + dns = 'loadbalanced' + yield func('fetch-det') + yield func('start-load-balancer', MONGODB_URI='mongodb://localhost:27017,localhost:27018') elif self.settings.auth: - dns = "dns-auth" - yield func("run-tests", SSL="ssl", AUTH=self.display("auth"), DNS=dns) + dns = 'dns-auth' + yield func('run-tests', SSL='ssl', AUTH=self.display('auth'), DNS=dns) def do_is_valid_combination(self) -> bool: prohibit(bool(self.settings.loadbalanced) and bool(self.settings.auth)) # Load balancer tests only run on some Linux hosts in Evergreen until CDRIVER-4041 is resolved. 
- prohibit(bool(self.settings.loadbalanced) and self.settings.ssl in ["darwinssl", "winssl"]) + prohibit(bool(self.settings.loadbalanced) and self.settings.ssl in ['darwinssl', 'winssl']) return True @@ -428,51 +438,51 @@ def do_is_valid_combination(self) -> bool: class CompressionTask(MatrixTask): - axes = OD([("compression", ["zlib", "snappy", "zstd"])]) - name_prefix = "test-latest-server" + axes = OD([('compression', ['zlib', 'snappy', 'zstd'])]) + name_prefix = 'test-latest-server' def additional_dependencies(self) -> Iterable[DependencySpec]: yield self.build_task_name @property def build_task_name(self) -> str: - return f"debug-compile-{self._compressor_suffix()}" + return f'debug-compile-{self._compressor_suffix()}' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield "compression" - yield "latest" + yield 'compression' + yield 'latest' yield from self._compressor_list() def name_parts(self) -> Iterable[str]: return [self.name_prefix, self._compressor_suffix()] def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("fetch-det") - yield func("bootstrap-mongo-orchestration", AUTH="noauth", SSL="nossl") - yield func("run-simple-http-server") - yield func("run-tests", AUTH="noauth", SSL="nossl", COMPRESSORS=",".join(self._compressor_list())) + yield func('fetch-build', BUILD_NAME=self.build_task_name) + yield func('fetch-det') + yield func('bootstrap-mongo-orchestration', AUTH='noauth', SSL='nossl') + yield func('run-simple-http-server') + yield func('run-tests', AUTH='noauth', SSL='nossl', COMPRESSORS=','.join(self._compressor_list())) def _compressor_suffix(self): - if self.settings.compression == "zlib": - return "compression-zlib" - elif self.settings.compression == "snappy": - return "compression-snappy" - elif self.settings.compression == "zstd": - return "compression-zstd" + if self.settings.compression == 'zlib': + return 'compression-zlib' + elif 
self.settings.compression == 'snappy': + return 'compression-snappy' + elif self.settings.compression == 'zstd': + return 'compression-zstd' else: - return "compression" + return 'compression' def _compressor_list(self): - if self.settings.compression == "zlib": - return ["zlib"] - elif self.settings.compression == "snappy": - return ["snappy"] - elif self.settings.compression == "zstd": - return ["zstd"] + if self.settings.compression == 'zlib': + return ['zlib'] + elif self.settings.compression == 'snappy': + return ['snappy'] + elif self.settings.compression == 'zstd': + return ['zstd'] else: - return ["snappy", "zlib", "zstd"] + return ['snappy', 'zlib', 'zstd'] all_tasks = chain(all_tasks, CompressionTask.matrix()) @@ -482,11 +492,11 @@ class SpecialIntegrationTask(NamedTask): def __init__( self, task_name: str, - main_dep: str = "debug-compile-sasl-openssl", + main_dep: str = 'debug-compile-sasl-openssl', uri: str | None = None, tags: Iterable[str] = (), - version: str = "latest", - topology: str = "server", + version: str = 'latest', + topology: str = 'server', ): self._main_dep = main_dep super().__init__(task_name, depends_on=[self._main_dep], tags=tags) @@ -495,40 +505,40 @@ def __init__( self._topo = topology def pre_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self._main_dep) - yield func("fetch-det") - yield func("bootstrap-mongo-orchestration", MONGODB_VERSION=self._version, TOPOLOGY=self._topo) - yield func("run-simple-http-server") - yield func("run-tests", URI=self._uri) + yield func('fetch-build', BUILD_NAME=self._main_dep) + yield func('fetch-det') + yield func('bootstrap-mongo-orchestration', MONGODB_VERSION=self._version, TOPOLOGY=self._topo) + yield func('run-simple-http-server') + yield func('run-tests', URI=self._uri) all_tasks = chain( all_tasks, [ # Verify that retryWrites=true is ignored with standalone. 
- SpecialIntegrationTask("retry-true-latest-server", uri="mongodb://localhost/?retryWrites=true"), - SpecialIntegrationTask("test-latest-server-hardened", "hardened-compile", tags=["hardened", "latest"]), + SpecialIntegrationTask('retry-true-latest-server', uri='mongodb://localhost/?retryWrites=true'), + SpecialIntegrationTask('test-latest-server-hardened', 'hardened-compile', tags=['hardened', 'latest']), ], ) class AuthTask(MatrixTask): - axes = OD([("sasl", ["sasl", "sspi", False]), ("ssl", ["openssl", "darwinssl", "winssl"])]) + axes = OD([('sasl', ['sasl', 'sspi', False]), ('ssl', ['openssl', 'darwinssl', 'winssl'])]) - name_prefix = "authentication-tests" + name_prefix = 'authentication-tests' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield "authentication-tests" - yield self.display("ssl") - yield self.display("sasl") + yield 'authentication-tests' + yield self.display('ssl') + yield self.display('sasl') def additional_dependencies(self) -> Iterable[DependencySpec]: yield self.build_task_name def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("run auth tests") + yield func('fetch-build', BUILD_NAME=self.build_task_name) + yield func('run auth tests') @property def build_task_name(self) -> str: @@ -536,14 +546,14 @@ def build_task_name(self) -> str: def name_parts(self) -> Iterable[str]: yield self.name_prefix - yield self.display("ssl") + yield self.display('ssl') if not self.settings.sasl: - yield "nosasl" + yield 'nosasl' def do_is_valid_combination(self) -> bool: - both_or_neither(self.settings.ssl == "winssl", self.settings.sasl == "sspi") + both_or_neither(self.settings.ssl == 'winssl', self.settings.sasl == 'sspi') if not self.settings.sasl: - require(self.settings.ssl == "openssl") + require(self.settings.ssl == 'openssl') return True @@ -556,109 +566,109 @@ def __init__(self, name: str, tags: Iterable[str], get_build: str, commands: Ite 
self._dep = get_build def pre_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self._dep) + yield func('fetch-build', BUILD_NAME=self._dep) all_tasks = chain( all_tasks, [ PostCompileTask( - "test-mongohouse", + 'test-mongohouse', tags=[], - get_build="debug-compile-sasl-openssl", - commands=[func("fetch-det"), func("build mongohouse"), func("run mongohouse"), func("test mongohouse")], + get_build='debug-compile-sasl-openssl', + commands=[func('fetch-det'), func('build mongohouse'), func('run mongohouse'), func('test mongohouse')], ), NamedTask( - "authentication-tests-asan-memcheck", - tags=["authentication-tests", "asan"], + 'authentication-tests-asan-memcheck', + tags=['authentication-tests', 'asan'], commands=[ - func("find-cmake-latest"), + func('find-cmake-latest'), shell_mongoc( """ env SANITIZE=address SASL=AUTO SSL=OPENSSL .evergreen/scripts/compile.sh """, add_expansions_to_env=True, ), - func("run auth tests", ASAN="on"), + func('run auth tests', ASAN='on'), ], - ) + ), ], ) # Add API version tasks. 
-for server_version in [ "8.0", "7.0", "6.0", "5.0"]: +for server_version in ['8.0', '7.0', '6.0', '5.0']: all_tasks = chain( all_tasks, [ PostCompileTask( - "test-versioned-api-" + server_version, - tags=["versioned-api", f"{server_version}"], - get_build="debug-compile-nosasl-openssl", + 'test-versioned-api-' + server_version, + tags=['versioned-api', f'{server_version}'], + get_build='debug-compile-nosasl-openssl', commands=[ - func("fetch-det"), + func('fetch-det'), func( - "bootstrap-mongo-orchestration", - TOPOLOGY="server", - AUTH="auth", - SSL="ssl", + 'bootstrap-mongo-orchestration', + TOPOLOGY='server', + AUTH='auth', + SSL='ssl', MONGODB_VERSION=server_version, - REQUIRE_API_VERSION="true", + REQUIRE_API_VERSION='true', ), - func("run-simple-http-server"), - func("run-tests", MONGODB_API_VERSION=1, AUTH="auth", SSL="ssl"), + func('run-simple-http-server'), + func('run-tests', MONGODB_API_VERSION=1, AUTH='auth', SSL='ssl'), ], ), PostCompileTask( - "test-versioned-api-accept-version-two-" + server_version, - tags=["versioned-api", f"{server_version}"], - get_build="debug-compile-nosasl-nossl", + 'test-versioned-api-accept-version-two-' + server_version, + tags=['versioned-api', f'{server_version}'], + get_build='debug-compile-nosasl-nossl', commands=[ - func("fetch-det"), + func('fetch-det'), func( - "bootstrap-mongo-orchestration", - TOPOLOGY="server", - AUTH="noauth", - SSL="nossl", + 'bootstrap-mongo-orchestration', + TOPOLOGY='server', + AUTH='noauth', + SSL='nossl', MONGODB_VERSION=server_version, - ORCHESTRATION_FILE="versioned-api-testing.json", + ORCHESTRATION_FILE='versioned-api-testing.json', ), - func("run-simple-http-server"), - func("run-tests", MONGODB_API_VERSION=1, AUTH="noauth", SSL="nossl"), + func('run-simple-http-server'), + func('run-tests', MONGODB_API_VERSION=1, AUTH='noauth', SSL='nossl'), ], - ) - ] + ), + ], ) class IPTask(MatrixTask): axes = OD( [ - ("client", ["ipv6", "ipv4", "localhost"]), - ("server", ["ipv6", "ipv4"]), + 
('client', ['ipv6', 'ipv4', 'localhost']), + ('server', ['ipv6', 'ipv4']), ] ) - name_prefix = "test-latest" + name_prefix = 'test-latest' def additional_dependencies(self) -> Iterable[DependencySpec]: - yield "debug-compile-nosasl-nossl" + yield 'debug-compile-nosasl-nossl' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield from ("nossl", "nosasl", "server", "ipv4-ipv6", "latest") + yield from ('nossl', 'nosasl', 'server', 'ipv4-ipv6', 'latest') def post_commands(self) -> Iterable[Value]: return [ - func("fetch-build", BUILD_NAME="debug-compile-nosasl-nossl"), - func("fetch-det"), - func("bootstrap-mongo-orchestration"), - func("run-simple-http-server"), + func('fetch-build', BUILD_NAME='debug-compile-nosasl-nossl'), + func('fetch-det'), + func('bootstrap-mongo-orchestration'), + func('run-simple-http-server'), func( - "run-tests", + 'run-tests', URI={ - "ipv6": "mongodb://[::1]/", - "ipv4": "mongodb://127.0.0.1/", - "localhost": "mongodb://localhost/", + 'ipv6': 'mongodb://[::1]/', + 'ipv4': 'mongodb://127.0.0.1/', + 'localhost': 'mongodb://localhost/', }[str(self.settings.client)], ), ] @@ -668,26 +678,26 @@ def name_parts(self) -> Iterable[str]: self.name_prefix, f'server-{self.display("server")}', f'client-{self.display("client")}', - "noauth", - "nosasl", - "nossl", + 'noauth', + 'nosasl', + 'nossl', ) def do_is_valid_combination(self) -> bool: # This would fail by design. - if self.settings.server == "ipv4": - prohibit(self.settings.client == "ipv6") + if self.settings.server == 'ipv4': + prohibit(self.settings.client == 'ipv6') # Default configuration is tested in other variants. 
- if self.settings.server == "ipv6": - prohibit(self.settings.client == "localhost") + if self.settings.server == 'ipv6': + prohibit(self.settings.client == 'localhost') return True all_tasks = chain(all_tasks, IPTask.matrix()) aws_compile_task = NamedTask( - "debug-compile-aws", + 'debug-compile-aws', commands=[ func('find-cmake-latest'), shell_mongoc( @@ -709,7 +719,7 @@ def do_is_valid_combination(self) -> bool: include_expansions_in_env=['distro_id', 'CC'], redirect_standard_error_to_output=True, ), - func("upload-build"), + func('upload-build'), ], ) @@ -719,15 +729,15 @@ def do_is_valid_combination(self) -> bool: class AWSTestTask(MatrixTask): axes = OD( [ - ("testcase", ["regular", "ec2", "ecs", "lambda", "assume_role", "assume_role_with_web_identity"]), - ("version", ["latest", "8.0", "7.0", "6.0", "5.0", "4.4"]), + ('testcase', ['regular', 'ec2', 'ecs', 'lambda', 'assume_role', 'assume_role_with_web_identity']), + ('version', ['latest', '8.0', '7.0', '6.0', '5.0', '4.4']), ] ) - name_prefix = "test-aws-openssl" + name_prefix = 'test-aws-openssl' def additional_dependencies(self) -> Iterable[DependencySpec]: - yield "debug-compile-aws" + yield 'debug-compile-aws' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() @@ -736,21 +746,21 @@ def additional_tags(self) -> Iterable[str]: def post_commands(self) -> Iterable[Value]: return [ - func("fetch-build", BUILD_NAME="debug-compile-aws"), - func("fetch-det"), + func('fetch-build', BUILD_NAME='debug-compile-aws'), + func('fetch-det'), func( - "bootstrap-mongo-orchestration", - AUTH="auth", - ORCHESTRATION_FILE="auth-aws.json", + 'bootstrap-mongo-orchestration', + AUTH='auth', + ORCHESTRATION_FILE='auth-aws.json', MONGODB_VERSION=self.settings.version, - TOPOLOGY="server", + TOPOLOGY='server', ), - func("run aws tests", TESTCASE=str(self.settings.testcase).upper()), + func('run aws tests', TESTCASE=str(self.settings.testcase).upper()), ] @property def name(self): - return 
f"{self.name_prefix}-{self.settings.testcase}-{self.settings.version}" + return f'{self.name_prefix}-{self.settings.testcase}-{self.settings.version}' all_tasks = chain(all_tasks, AWSTestTask.matrix()) @@ -760,26 +770,26 @@ class OCSPTask(MatrixTask): axes = OD( [ ( - "test", + 'test', [ - "test_1", - "test_2", - "test_3", - "test_4", - "soft_fail_test", - "malicious_server_test_1", - "malicious_server_test_2", - "cache", + 'test_1', + 'test_2', + 'test_3', + 'test_4', + 'soft_fail_test', + 'malicious_server_test_1', + 'malicious_server_test_2', + 'cache', ], ), - ("delegate", ["delegate", "nodelegate"]), - ("cert", ["rsa", "ecdsa"]), - ("ssl", ["openssl", "darwinssl", "winssl"]), - ("version", ["latest", "8.0", "7.0", "6.0", "5.0", "4.4"]), + ('delegate', ['delegate', 'nodelegate']), + ('cert', ['rsa', 'ecdsa']), + ('ssl', ['openssl', 'darwinssl', 'winssl']), + ('version', ['latest', '8.0', '7.0', '6.0', '5.0', '4.4']), ] ) - name_prefix = "test-ocsp" + name_prefix = 'test-ocsp' @property def build_task_name(self) -> str: @@ -794,34 +804,34 @@ def additional_dependencies(self) -> Iterable[DependencySpec]: @property def name(self): - return f"ocsp-{self.settings.ssl}-{self.test}-{self.settings.cert}-{self.settings.delegate}-{self.settings.version}" + return f'ocsp-{self.settings.ssl}-{self.test}-{self.settings.cert}-{self.settings.delegate}-{self.settings.version}' @property def test(self) -> str: return str(self.settings.test) def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("fetch-det") + yield func('fetch-build', BUILD_NAME=self.build_task_name) + yield func('fetch-det') - stapling = "mustStaple" - if self.test in ["test_3", "test_4", "soft_fail_test", "cache"]: - stapling = "disableStapling" - if self.test in ["malicious_server_test_1", "malicious_server_test_2"]: - stapling = "mustStaple-disableStapling" + stapling = 'mustStaple' + if self.test in ['test_3', 'test_4', 'soft_fail_test', 
'cache']: + stapling = 'disableStapling' + if self.test in ['malicious_server_test_1', 'malicious_server_test_2']: + stapling = 'mustStaple-disableStapling' orchestration = func( - "bootstrap-mongo-orchestration", + 'bootstrap-mongo-orchestration', MONGODB_VERSION=self.settings.version, - TOPOLOGY="server", - SSL="ssl", - OCSP="on", - ORCHESTRATION_FILE=f"{self.settings.cert}-basic-tls-ocsp-{stapling}.json", + TOPOLOGY='server', + SSL='ssl', + OCSP='on', + ORCHESTRATION_FILE=f'{self.settings.cert}-basic-tls-ocsp-{stapling}.json', ) # The cache test expects a revoked response from an OCSP responder, exactly like TEST_4. - test_column = "TEST_4" if self.test == "cache" else str(self.test).upper() - use_delegate = "ON" if self.settings.delegate == "delegate" else "OFF" + test_column = 'TEST_4' if self.test == 'cache' else str(self.test).upper() + use_delegate = 'ON' if self.settings.delegate == 'delegate' else 'OFF' yield ( shell_mongoc( @@ -833,7 +843,7 @@ def post_commands(self) -> Iterable[Value]: yield (orchestration) - if self.test == "cache": + if self.test == 'cache': yield ( shell_mongoc( f""" @@ -857,35 +867,35 @@ def to_dict(self): # OCSP tests should run with a batchtime of 14 days. Avoid running OCSP # tests in patch builds by default (only in commit builds). - task["patchable"] = False + task['patchable'] = False return task # Testing in OCSP has a lot of exceptions. def do_is_valid_combination(self) -> bool: - if self.settings.ssl == "darwinssl": + if self.settings.ssl == 'darwinssl': # Secure Transport quietly ignores a must-staple certificate with no stapled response. - prohibit(self.test == "malicious_server_test_2") + prohibit(self.test == 'malicious_server_test_2') # Why does this fail with Secure Transport (CSSMERR_TP_CERT_SUSPENDED)...? - prohibit(self.test == "test_3") + prohibit(self.test == 'test_3') # CDRIVER-3759: Secure Transport does not implement soft failure? 
- prohibit(self.test == "soft_fail_test") + prohibit(self.test == 'soft_fail_test') # Only Server 6.0+ are available on MacOS ARM64. - if self.settings.version != "latest": - prohibit(Version(self.settings.version) < Version("6.0")) + if self.settings.version != 'latest': + prohibit(Version(self.settings.version) < Version('6.0')) - if self.settings.ssl == "darwinssl" or self.settings.ssl == "winssl": + if self.settings.ssl == 'darwinssl' or self.settings.ssl == 'winssl': # ECDSA certs can't be loaded (in the PEM format they're stored) on Windows/macOS. Skip them. - prohibit(self.settings.cert == "ecdsa") + prohibit(self.settings.cert == 'ecdsa') # OCSP stapling is not supported on macOS or Windows. - prohibit(self.test in ["test_1", "test_2", "cache"]) + prohibit(self.test in ['test_1', 'test_2', 'cache']) - if self.test == "soft_fail_test" or self.test == "malicious_server_test_2" or self.test == "cache": - prohibit(self.settings.delegate == "delegate") + if self.test == 'soft_fail_test' or self.test == 'malicious_server_test_2' or self.test == 'cache': + prohibit(self.settings.delegate == 'delegate') return True diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py b/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py index 5160fa73e4a..d18ae3c97e6 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py @@ -18,18 +18,18 @@ from collections import OrderedDict as OD from typing import MutableSequence -from evergreen_config_generator.functions import shell_exec, func +from evergreen_config_generator.functions import func, shell_exec +from evergreen_config_generator.taskgroups import TaskGroup from evergreen_config_generator.tasks import NamedTask from evergreen_config_generator.variants import Variant -from evergreen_config_generator.taskgroups import TaskGroup def _create_tasks(): # passtask is expected to 
run on a remote Azure VM and succeed at obtaining credentials. - passtask = NamedTask(task_name="testazurekms-task") + passtask = NamedTask(task_name='testazurekms-task') passtask.commands = [ - func("fetch-source"), - func("find-cmake-latest"), + func('fetch-source'), + func('find-cmake-latest'), shell_exec( r""" echo "Building test-azurekms ... begin" @@ -71,10 +71,10 @@ def _create_tasks(): ), ] - failtask = NamedTask(task_name="testazurekms-fail-task") + failtask = NamedTask(task_name='testazurekms-fail-task') failtask.commands = [ - func("fetch-source"), - func("find-cmake-latest"), + func('fetch-source'), + func('find-cmake-latest'), shell_exec( r""" pushd mongoc @@ -100,28 +100,23 @@ def _create_tasks(): def _create_variant(): return Variant( - name="testazurekms-variant", - display_name="Azure KMS", + name='testazurekms-variant', + display_name='Azure KMS', # Azure Virtual Machine created is Debian 10. - run_on="debian11-small", - tasks=["testazurekms_task_group", "testazurekms-fail-task"], + run_on='debian11-small', + tasks=['testazurekms_task_group', 'testazurekms-fail-task'], batchtime=20160, ) # Use a batchtime of 14 days as suggested by the CSFLE test README def _create_task_group(): - task_group = TaskGroup(name="testazurekms_task_group") + task_group = TaskGroup(name='testazurekms_task_group') task_group.setup_group_can_fail_task = True task_group.setup_group_timeout_secs = 1800 # 30 minutes task_group.setup_group = [ - func("fetch-det"), + func('fetch-det'), # Assume role to get AWS secrets. 
- { - "command": "ec2.assume_role", - "params": { - "role_arn": "${aws_test_secrets_role}" - } - }, + {'command': 'ec2.assume_role', 'params': {'role_arn': '${aws_test_secrets_role}'}}, shell_exec( r""" DRIVERS_TOOLS=$(pwd)/drivers-evergreen-tools @@ -141,17 +136,17 @@ def _create_task_group(): $DRIVERS_TOOLS/.evergreen/csfle/azurekms/create-and-setup-vm.sh """, test=False, - include_expansions_in_env=[ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + include_expansions_in_env=['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN'], ), # Load the AZUREKMS_VMNAME expansion. OD( [ - ("command", "expansions.update"), + ('command', 'expansions.update'), ( - "params", + 'params', OD( [ - ("file", "testazurekms-expansions.yml"), + ('file', 'testazurekms-expansions.yml'), ] ), ), @@ -163,12 +158,12 @@ def _create_task_group(): # Load expansions again. The setup task may have failed before running `expansions.update`. OD( [ - ("command", "expansions.update"), + ('command', 'expansions.update'), ( - "params", + 'params', OD( [ - ("file", "testazurekms-expansions.yml"), + ('file', 'testazurekms-expansions.yml'), ] ), ), @@ -184,7 +179,7 @@ def _create_task_group(): test=False, ), ] - task_group.tasks = ["testazurekms-task"] + task_group.tasks = ['testazurekms-task'] return task_group diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py index ccdf6720021..99de9d4519d 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py @@ -17,20 +17,20 @@ from collections import OrderedDict as OD from typing import MutableSequence -from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest - -from evergreen_config_generator.functions import shell_exec, func +from evergreen_config_generator.functions import func, shell_exec +from 
evergreen_config_generator.taskgroups import TaskGroup from evergreen_config_generator.tasks import NamedTask from evergreen_config_generator.variants import Variant -from evergreen_config_generator.taskgroups import TaskGroup + +from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest def _create_tasks(): passtask = NamedTask( - task_name="testgcpkms-task", + task_name='testgcpkms-task', commands=[ - func("fetch-source"), - func("find-cmake-latest"), + func('fetch-source'), + func('find-cmake-latest'), shell_exec( r""" echo "Building test-gcpkms ... begin" @@ -70,9 +70,9 @@ def _create_tasks(): ) failtask = NamedTask( - task_name="testgcpkms-fail-task", + task_name='testgcpkms-fail-task', commands=[ - func("find-cmake-latest"), + func('find-cmake-latest'), shell_exec( r""" pushd mongoc @@ -97,21 +97,21 @@ def _create_tasks(): def _create_variant(): return Variant( - name="testgcpkms-variant", - display_name="GCP KMS", + name='testgcpkms-variant', + display_name='GCP KMS', # GCP Virtual Machine created is Debian 11. - run_on="debian11-small", - tasks=["testgcpkms_task_group", "testgcpkms-fail-task"], + run_on='debian11-small', + tasks=['testgcpkms_task_group', 'testgcpkms-fail-task'], batchtime=20160, ) # Use a batchtime of 14 days as suggested by the CSFLE test README def _create_task_group(): - task_group = TaskGroup(name="testgcpkms_task_group") + task_group = TaskGroup(name='testgcpkms_task_group') task_group.setup_group_can_fail_task = True task_group.setup_group_timeout_secs = 1800 # 30 minutes task_group.setup_group = [ - func("fetch-det"), + func('fetch-det'), # Create and set up a GCE instance using driver tools script shell_exec( r""" @@ -124,7 +124,7 @@ def _create_task_group(): test=False, ), # Load the GCPKMS_GCLOUD, GCPKMS_INSTANCE, GCPKMS_PROJECT, and GCPKMS_ZONE expansions. 
- OD([("command", "expansions.update"), ("params", OD([("file", "testgcpkms-expansions.yml")]))]), + OD([('command', 'expansions.update'), ('params', OD([('file', 'testgcpkms-expansions.yml')]))]), ] task_group.teardown_group = [ @@ -139,7 +139,7 @@ def _create_task_group(): test=False, ) ] - task_group.tasks = ["testgcpkms-task"] + task_group.tasks = ['testgcpkms-task'] return task_group diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py b/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py index 1ffa234294d..64830b4b954 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py @@ -16,12 +16,11 @@ from evergreen_config_generator.variants import Variant - mobile_flags = ( - " -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY" - " -DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=ONLY" - " -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER" - " -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY" + ' -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY' + ' -DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=ONLY' + ' -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER' + ' -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY' ) @@ -32,323 +31,323 @@ def days(n: int) -> int: all_variants = [ Variant( - "abi-compliance-check", - "ABI Compliance Check", - ["ubuntu2004-small", "ubuntu2004-medium", "ubuntu2004-large"], - ["abi-compliance-check"], + 'abi-compliance-check', + 'ABI Compliance Check', + ['ubuntu2004-small', 'ubuntu2004-medium', 'ubuntu2004-large'], + ['abi-compliance-check'], ), Variant( - "smoke", - "Smoke Tests", - "ubuntu2204-small", + 'smoke', + 'Smoke Tests', + 'ubuntu2204-small', [ - "make-docs", - "kms-divergence-check", - "release-compile", - "debug-compile-no-counters", - "compile-tracing", - "link-with-cmake", - "link-with-cmake-ssl", - "link-with-cmake-snappy", - "verify-headers", - OD([("name", "link-with-cmake-mac"), ("distros", ["macos-14-arm64"])]), - OD([("name", "link-with-cmake-windows"), 
("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-cmake-windows-ssl"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-cmake-windows-snappy"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-cmake-mingw"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-pkg-config"), ("distros", ["ubuntu2004-test"])]), - OD([("name", "link-with-pkg-config-mac"), ("distros", ["macos-14-arm64"])]), - "link-with-pkg-config-ssl", - "link-with-bson", - OD([("name", "link-with-bson-windows"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-bson-mac"), ("distros", ["macos-14-arm64"])]), - OD([("name", "link-with-bson-mingw"), ("distros", ["windows-vsCurrent-large"])]), - "check-headers", - "debug-compile-with-warnings", - "install-libmongoc-after-libbson", + 'make-docs', + 'kms-divergence-check', + 'release-compile', + 'debug-compile-no-counters', + 'compile-tracing', + 'link-with-cmake', + 'link-with-cmake-ssl', + 'link-with-cmake-snappy', + 'verify-headers', + OD([('name', 'link-with-cmake-mac'), ('distros', ['macos-14-arm64'])]), + OD([('name', 'link-with-cmake-windows'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-cmake-windows-ssl'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-cmake-windows-snappy'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-cmake-mingw'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-pkg-config'), ('distros', ['ubuntu2004-test'])]), + OD([('name', 'link-with-pkg-config-mac'), ('distros', ['macos-14-arm64'])]), + 'link-with-pkg-config-ssl', + 'link-with-bson', + OD([('name', 'link-with-bson-windows'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-bson-mac'), ('distros', ['macos-14-arm64'])]), + OD([('name', 'link-with-bson-mingw'), ('distros', ['windows-vsCurrent-large'])]), + 'check-headers', + 'debug-compile-with-warnings', + 
'install-libmongoc-after-libbson', ], { # Disable the MongoDB legacy shell download, which is not available in 5.0 for u22 - "SKIP_LEGACY_SHELL": "1" + 'SKIP_LEGACY_SHELL': '1' }, - tags=["pr-merge-gate"], + tags=['pr-merge-gate'], ), Variant( - "clang37", - "clang 3.7 (Archlinux)", - "archlinux-test", + 'clang37', + 'clang 3.7 (Archlinux)', + 'archlinux-test', [ - "release-compile", - "debug-compile-sasl-openssl", - "debug-compile-nosasl-openssl", - ".authentication-tests .openssl", + 'release-compile', + 'debug-compile-sasl-openssl', + 'debug-compile-nosasl-openssl', + '.authentication-tests .openssl', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "clang100-i686", - "clang 10.0 (i686) (Ubuntu 20.04)", - "ubuntu2004-test", + 'clang100-i686', + 'clang 10.0 (i686) (Ubuntu 20.04)', + 'ubuntu2004-test', [ - "release-compile", - "debug-compile-nosasl-nossl", - ".debug-compile !.sspi .nossl .nosasl", - ".latest .nossl .nosasl", + 'release-compile', + 'debug-compile-nosasl-nossl', + '.debug-compile !.sspi .nossl .nosasl', + '.latest .nossl .nosasl', ], - {"CC": "clang", "MARCH": "i686"}, + {'CC': 'clang', 'MARCH': 'i686'}, ), Variant( - "gcc82rhel", - "GCC 8.2 (RHEL 8.0)", - "rhel80-test", + 'gcc82rhel', + 'GCC 8.2 (RHEL 8.0)', + 'rhel80-test', [ - ".hardened", - ".compression !.snappy !.zstd", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sasl-openssl", - ".authentication-tests .openssl", - ".latest .nossl", + '.hardened', + '.compression !.snappy !.zstd', + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sasl-openssl', + '.authentication-tests .openssl', + '.latest .nossl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, ), Variant( - "gcc102", - "GCC 10.2 (Debian 11.0)", - "debian11-large", - ["release-compile", "debug-compile-nosasl-nossl", ".latest .nossl"], - {"CC": "gcc"}, + 'gcc102', + 'GCC 10.2 (Debian 11.0)', + 'debian11-large', + ['release-compile', 
'debug-compile-nosasl-nossl', '.latest .nossl'], + {'CC': 'gcc'}, ), Variant( - "gcc94-i686", - "GCC 9.4 (i686) (Ubuntu 20.04)", - "ubuntu2004-test", - ["release-compile", "debug-compile-nosasl-nossl", ".latest .nossl .nosasl"], - {"CC": "gcc", "MARCH": "i686"}, + 'gcc94-i686', + 'GCC 9.4 (i686) (Ubuntu 20.04)', + 'ubuntu2004-test', + ['release-compile', 'debug-compile-nosasl-nossl', '.latest .nossl .nosasl'], + {'CC': 'gcc', 'MARCH': 'i686'}, ), Variant( - "gcc94", - "GCC 9.4 (Ubuntu 20.04)", - "ubuntu2004-test", + 'gcc94', + 'GCC 9.4 (Ubuntu 20.04)', + 'ubuntu2004-test', [ - ".compression !.zstd", - "debug-compile-nosrv", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-sasl-openssl", - "debug-compile-nosasl-openssl", - ".authentication-tests .openssl", - ".authentication-tests .asan", - ".test-coverage", - ".latest .nossl", - "retry-true-latest-server", - "test-dns-openssl", - "test-dns-auth-openssl", - "test-dns-loadbalanced-openssl", + '.compression !.zstd', + 'debug-compile-nosrv', + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-sasl-openssl', + 'debug-compile-nosasl-openssl', + '.authentication-tests .openssl', + '.authentication-tests .asan', + '.test-coverage', + '.latest .nossl', + 'retry-true-latest-server', + 'test-dns-openssl', + 'test-dns-auth-openssl', + 'test-dns-loadbalanced-openssl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, ), Variant( - "darwin", - "*Darwin, macOS (Apple LLVM)", - "macos-14-arm64", + 'darwin', + '*Darwin, macOS (Apple LLVM)', + 'macos-14-arm64', [ - ".compression !.snappy", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosrv", - "debug-compile-sasl-darwinssl", - "debug-compile-nosasl-nossl", - ".authentication-tests .darwinssl", - ".latest .nossl", - "test-dns-darwinssl", - "test-dns-auth-darwinssl", - "debug-compile-lto", - "debug-compile-lto-thin", - "debug-compile-aws", - "test-aws-openssl-regular-latest", + '.compression !.snappy', + 'release-compile', + 
'debug-compile-nosasl-nossl', + 'debug-compile-nosrv', + 'debug-compile-sasl-darwinssl', + 'debug-compile-nosasl-nossl', + '.authentication-tests .darwinssl', + '.latest .nossl', + 'test-dns-darwinssl', + 'test-dns-auth-darwinssl', + 'debug-compile-lto', + 'debug-compile-lto-thin', + 'debug-compile-aws', + 'test-aws-openssl-regular-latest', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "darwin-intel", - "*Darwin, Intel macOS (Apple LLVM)", - "macos-14", + 'darwin-intel', + '*Darwin, Intel macOS (Apple LLVM)', + 'macos-14', [ - "debug-compile-aws", - "debug-compile-rdtscp", - "test-aws-openssl-regular-4.4", + 'debug-compile-aws', + 'debug-compile-rdtscp', + 'test-aws-openssl-regular-4.4', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "windows-2017-32", - "Windows (i686) (VS 2017)", - "windows-vsCurrent-large", - ["debug-compile-nosasl-nossl", ".latest .nossl .nosasl"], - {"CC": "Visual Studio 15 2017"}, + 'windows-2017-32', + 'Windows (i686) (VS 2017)', + 'windows-vsCurrent-large', + ['debug-compile-nosasl-nossl', '.latest .nossl .nosasl'], + {'CC': 'Visual Studio 15 2017'}, ), Variant( - "windows-2017", - "Windows (VS 2017)", - "windows-vsCurrent-large", + 'windows-2017', + 'Windows (VS 2017)', + 'windows-vsCurrent-large', [ - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sspi-winssl", - "debug-compile-nosrv", - ".latest .nossl", - ".nosasl .latest .nossl", - ".compression !.snappy !.zstd !.latest", - "test-dns-winssl", - "test-dns-auth-winssl", - "debug-compile-aws", - "test-aws-openssl-regular-4.4", - "test-aws-openssl-regular-latest", + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sspi-winssl', + 'debug-compile-nosrv', + '.latest .nossl', + '.nosasl .latest .nossl', + '.compression !.snappy !.zstd !.latest', + 'test-dns-winssl', + 'test-dns-auth-winssl', + 'debug-compile-aws', + 'test-aws-openssl-regular-4.4', + 
'test-aws-openssl-regular-latest', # Authentication tests with OpenSSL on Windows are only run on the vs2017 variant. # Older vs variants fail to verify certificates against Atlas tests. - ".authentication-tests .openssl !.sasl", - ".authentication-tests .winssl", + '.authentication-tests .openssl !.sasl', + '.authentication-tests .winssl', ], - {"CC": "Visual Studio 15 2017 Win64"}, + {'CC': 'Visual Studio 15 2017 Win64'}, ), Variant( - "mingw-windows2016", - "MinGW-W64 (Windows Server 2016)", - "windows-vsCurrent-large", - ["debug-compile-nosasl-nossl", ".latest .nossl .nosasl .server"], - {"CC": "gcc"}, + 'mingw-windows2016', + 'MinGW-W64 (Windows Server 2016)', + 'windows-vsCurrent-large', + ['debug-compile-nosasl-nossl', '.latest .nossl .nosasl .server'], + {'CC': 'gcc'}, ), Variant( - "rhel8-power", - "Power (ppc64le) (RHEL 8)", - "rhel8-power-large", + 'rhel8-power', + 'Power (ppc64le) (RHEL 8)', + 'rhel8-power-large', [ - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-sasl-openssl", - ".latest .nossl", - "test-dns-openssl", + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-sasl-openssl', + '.latest .nossl', + 'test-dns-openssl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, patchable=False, batchtime=days(1), ), Variant( - "arm-ubuntu2004", - "*ARM (aarch64) (Ubuntu 20.04)", - "ubuntu2004-arm64-large", + 'arm-ubuntu2004', + '*ARM (aarch64) (Ubuntu 20.04)', + 'ubuntu2004-arm64-large', [ - ".compression !.snappy !.zstd", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sasl-openssl", - ".authentication-tests .openssl", - ".latest .nossl", - "test-dns-openssl", + '.compression !.snappy !.zstd', + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sasl-openssl', + '.authentication-tests .openssl', + '.latest .nossl', + 'test-dns-openssl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, batchtime=days(1), ), Variant( - "zseries-rhel8", - 
"*zSeries", - "rhel8-zseries-large", + 'zseries-rhel8', + '*zSeries', + 'rhel8-zseries-large', [ - "release-compile", + 'release-compile', # '.compression', --> TODO: waiting on ticket CDRIVER-3258 - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sasl-openssl", - ".authentication-tests .openssl", - ".latest .nossl", + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sasl-openssl', + '.authentication-tests .openssl', + '.latest .nossl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, patchable=False, batchtime=days(1), ), # Run AWS tests for MongoDB 4.4 and 5.0 on Ubuntu 20.04. AWS setup scripts # expect Ubuntu 20.04+. MongoDB 4.4 and 5.0 are not available on 22.04. Variant( - "aws-ubuntu2004", - "AWS Tests (Ubuntu 20.04)", - "ubuntu2004-small", + 'aws-ubuntu2004', + 'AWS Tests (Ubuntu 20.04)', + 'ubuntu2004-small', [ - "debug-compile-aws", - ".test-aws .4.4", - ".test-aws .5.0", + 'debug-compile-aws', + '.test-aws .4.4', + '.test-aws .5.0', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "aws-ubuntu2204", - "AWS Tests (Ubuntu 22.04)", - "ubuntu2004-small", + 'aws-ubuntu2204', + 'AWS Tests (Ubuntu 22.04)', + 'ubuntu2004-small', [ - "debug-compile-aws", - ".test-aws .6.0", - ".test-aws .7.0", - ".test-aws .8.0", - ".test-aws .latest", + 'debug-compile-aws', + '.test-aws .6.0', + '.test-aws .7.0', + '.test-aws .8.0', + '.test-aws .latest', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), - Variant("mongohouse", "Mongohouse Test", "ubuntu2204-small", ["debug-compile-sasl-openssl", "test-mongohouse"], {}), + Variant('mongohouse', 'Mongohouse Test', 'ubuntu2204-small', ['debug-compile-sasl-openssl', 'test-mongohouse'], {}), Variant( - "ocsp", - "OCSP tests", - "ubuntu2004-small", + 'ocsp', + 'OCSP tests', + 'ubuntu2004-small', [ - OD([("name", "debug-compile-nosasl-openssl")]), - OD([("name", "debug-compile-nosasl-darwinssl"), ("distros", ["macos-14-arm64"])]), - OD([("name", "debug-compile-nosasl-winssl"), 
("distros", ["windows-vsCurrent-large"])]), - OD([("name", ".ocsp-openssl")]), - OD([("name", ".ocsp-darwinssl"), ("distros", ["macos-14-arm64"])]), - OD([("name", ".ocsp-winssl"), ("distros", ["windows-vsCurrent-large"])]), + OD([('name', 'debug-compile-nosasl-openssl')]), + OD([('name', 'debug-compile-nosasl-darwinssl'), ('distros', ['macos-14-arm64'])]), + OD([('name', 'debug-compile-nosasl-winssl'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', '.ocsp-openssl')]), + OD([('name', '.ocsp-darwinssl'), ('distros', ['macos-14-arm64'])]), + OD([('name', '.ocsp-winssl'), ('distros', ['windows-vsCurrent-large'])]), ], {}, batchtime=days(7), display_tasks=[ { - "name": "ocsp-openssl", - "execution_tasks": [".ocsp-openssl"], + 'name': 'ocsp-openssl', + 'execution_tasks': ['.ocsp-openssl'], }, { - "name": "ocsp-darwinssl", - "execution_tasks": [".ocsp-darwinssl"], + 'name': 'ocsp-darwinssl', + 'execution_tasks': ['.ocsp-darwinssl'], }, { - "name": "ocsp-winssl", - "execution_tasks": [".ocsp-winssl"], + 'name': 'ocsp-winssl', + 'execution_tasks': ['.ocsp-winssl'], }, ], ), Variant( - "packaging", - "Linux Distro Packaging", - "debian12-latest-small", + 'packaging', + 'Linux Distro Packaging', + 'debian12-latest-small', [ - "debian-package-build", - OD([("name", "rpm-package-build"), ("distros", ["rhel90-arm64-small"])]), + 'debian-package-build', + OD([('name', 'rpm-package-build'), ('distros', ['rhel90-arm64-small'])]), ], {}, - tags=["pr-merge-gate"], + tags=['pr-merge-gate'], ), # Test 7.0+ with Ubuntu 20.04+ since MongoDB 7.0 no longer ships binaries for Ubuntu 18.04. 
Variant( - "versioned-api-ubuntu2004", - "Versioned API Tests (Ubuntu 20.04)", - "ubuntu2004-test", + 'versioned-api-ubuntu2004', + 'Versioned API Tests (Ubuntu 20.04)', + 'ubuntu2004-test', [ - "debug-compile-nosasl-openssl", - "debug-compile-nosasl-nossl", - ".versioned-api .5.0", - ".versioned-api .6.0", - ".versioned-api .7.0", - ".versioned-api .8.0", + 'debug-compile-nosasl-openssl', + 'debug-compile-nosasl-nossl', + '.versioned-api .5.0', + '.versioned-api .6.0', + '.versioned-api .7.0', + '.versioned-api .8.0', ], {}, ), diff --git a/.evergreen/legacy_config_generator/generate-evergreen-config.py b/.evergreen/legacy_config_generator/generate-evergreen-config.py index 904581b57e6..ee68060c7eb 100644 --- a/.evergreen/legacy_config_generator/generate-evergreen-config.py +++ b/.evergreen/legacy_config_generator/generate-evergreen-config.py @@ -23,16 +23,16 @@ """ from collections import OrderedDict as OD -from os.path import dirname, join as joinpath, normpath +from os.path import dirname, normpath +from os.path import join as joinpath from evergreen_config_generator import generate - from evergreen_config_lib.functions import all_functions -from evergreen_config_lib.tasks import all_tasks -from evergreen_config_lib.variants import all_variants from evergreen_config_lib.taskgroups import all_task_groups -from evergreen_config_lib.testgcpkms import testgcpkms_generate +from evergreen_config_lib.tasks import all_tasks from evergreen_config_lib.testazurekms import testazurekms_generate +from evergreen_config_lib.testgcpkms import testgcpkms_generate +from evergreen_config_lib.variants import all_variants task_groups = list(all_task_groups) testazurekms_generate(all_tasks, all_variants, task_groups) @@ -40,13 +40,13 @@ config = OD( [ - ("functions", all_functions), - ("tasks", all_tasks), - ("task_groups", task_groups), - ("buildvariants", all_variants), + ('functions', all_functions), + ('tasks', all_tasks), + ('task_groups', task_groups), + ('buildvariants', 
all_variants), ] ) this_dir = dirname(__file__) -generated_configs_dir = normpath(joinpath(this_dir, "../generated_configs")) -generate(config, joinpath(generated_configs_dir, "legacy-config.yml")) +generated_configs_dir = normpath(joinpath(this_dir, '../generated_configs')) +generate(config, joinpath(generated_configs_dir, 'legacy-config.yml')) diff --git a/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh b/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh index 1e40fba5a7c..c2a87a10356 100755 --- a/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh +++ b/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh @@ -3,6 +3,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp-responder.crt \ --ocsp_responder_key ocsp-responder.key \ - -p 8100 \ - -v \ - --fault revoked + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh b/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh index 5074a7ecabd..5b0d33b4f94 100755 --- a/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh +++ b/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp-responder.crt \ --ocsp_responder_key ocsp-responder.key \ - -p 8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/ocsp/ecdsa/mock-revoked.sh b/.evergreen/ocsp/ecdsa/mock-revoked.sh index a6bf2ef025e..bff4ef1f61c 100755 --- a/.evergreen/ocsp/ecdsa/mock-revoked.sh +++ b/.evergreen/ocsp/ecdsa/mock-revoked.sh @@ -4,7 +4,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 8100 \ - -v \ - --fault revoked - + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/ecdsa/mock-valid.sh b/.evergreen/ocsp/ecdsa/mock-valid.sh index c89ce9e954d..d015870e5a9 100755 --- a/.evergreen/ocsp/ecdsa/mock-valid.sh +++ b/.evergreen/ocsp/ecdsa/mock-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 
8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/ocsp/ecdsa/rename.sh b/.evergreen/ocsp/ecdsa/rename.sh index 9c7df02758c..977f268a919 100755 --- a/.evergreen/ocsp/ecdsa/rename.sh +++ b/.evergreen/ocsp/ecdsa/rename.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash [ ! -f ecdsa-ca-ocsp.pem ] || mv ecdsa-ca-ocsp.pem ca.pem [ ! -f ecdsa-ca-ocsp.crt ] || mv ecdsa-ca-ocsp.crt ca.crt -[ ! -f ecdsa-ca-ocsp.key ] || mv ecdsa-ca-ocsp.key ca.key +[ ! -f ecdsa-ca-ocsp.key ] || mv ecdsa-ca-ocsp.key ca.key [ ! -f ecdsa-server-ocsp.pem ] || mv ecdsa-server-ocsp.pem server.pem [ ! -f ecdsa-server-ocsp-mustStaple.pem ] || mv ecdsa-server-ocsp-mustStaple.pem server-mustStaple.pem [ ! -f ecdsa-ocsp-responder.crt ] || mv ecdsa-ocsp-responder.crt ocsp-responder.crt diff --git a/.evergreen/ocsp/mock_ocsp_responder.py b/.evergreen/ocsp/mock_ocsp_responder.py index 0c64ff2a56b..9de06cf78c7 100644 --- a/.evergreen/ocsp/mock_ocsp_responder.py +++ b/.evergreen/ocsp/mock_ocsp_responder.py @@ -39,22 +39,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import unicode_literals, division, absolute_import, print_function +from __future__ import absolute_import, division, print_function, unicode_literals -import logging import base64 +import enum import inspect +import logging import re -import enum import sys import textwrap -from datetime import datetime, timezone, timedelta -from typing import Callable, Tuple, Optional +from datetime import datetime, timedelta, timezone +from typing import Callable, Optional, Tuple -from asn1crypto import x509, keys, core, ocsp +from asn1crypto import core, keys, ocsp, x509 from asn1crypto.ocsp import OCSPRequest, OCSPResponse +from flask import Flask, Response, request from oscrypto import asymmetric -from flask import Flask, request, Response __version__ = '0.10.2' __version_info__ = (0, 10, 2) @@ -66,6 +66,7 @@ else: byte_cls = bytes + def _pretty_message(string, *params): """ Takes a multi-line string and does the following: @@ -111,6 +112,7 @@ def _type_name(value): return cls.__name__ return '%s.%s' % (cls.__module__, cls.__name__) + def _writer(func): """ Decorator for a custom writer, but a default reader @@ -121,7 +123,6 @@ def _writer(func): class OCSPResponseBuilder(object): - _response_status = None _certificate = None _certificate_status = None @@ -184,12 +185,14 @@ def nonce(self, value): """ if not isinstance(value, byte_cls): - raise TypeError(_pretty_message( - ''' - nonce must be a byte string, not %s - ''', - _type_name(value) - )) + raise TypeError( + _pretty_message( + """ + nonce must be a byte string, not %s + """, + _type_name(value), + ) + ) self._nonce = value @@ -205,14 +208,16 @@ def certificate_issuer(self, value): if value is not None: is_oscrypto = isinstance(value, asymmetric.Certificate) if not is_oscrypto and not isinstance(value, x509.Certificate): - raise TypeError(_pretty_message( - ''' - certificate_issuer must be an instance of - asn1crypto.x509.Certificate or - oscrypto.asymmetric.Certificate, not %s - ''', - 
_type_name(value) - )) + raise TypeError( + _pretty_message( + """ + certificate_issuer must be an instance of + asn1crypto.x509.Certificate or + oscrypto.asymmetric.Certificate, not %s + """, + _type_name(value), + ) + ) if is_oscrypto: value = value.asn1 @@ -228,12 +233,14 @@ def next_update(self, value): """ if not isinstance(value, datetime): - raise TypeError(_pretty_message( - ''' - next_update must be an instance of datetime.datetime, not %s - ''', - _type_name(value) - )) + raise TypeError( + _pretty_message( + """ + next_update must be an instance of datetime.datetime, not %s + """, + _type_name(value), + ) + ) self._next_update = value @@ -253,49 +260,49 @@ def build(self, responder_private_key=None, responder_certificate=None): An asn1crypto.ocsp.OCSPResponse object of the response """ if self._response_status != 'successful': - return ocsp.OCSPResponse({ - 'response_status': self._response_status - }) + return ocsp.OCSPResponse({'response_status': self._response_status}) is_oscrypto = isinstance(responder_private_key, asymmetric.PrivateKey) if not isinstance(responder_private_key, keys.PrivateKeyInfo) and not is_oscrypto: - raise TypeError(_pretty_message( - ''' - responder_private_key must be an instance ofthe c - asn1crypto.keys.PrivateKeyInfo or - oscrypto.asymmetric.PrivateKey, not %s - ''', - _type_name(responder_private_key) - )) + raise TypeError( + _pretty_message( + """ + responder_private_key must be an instance ofthe c + asn1crypto.keys.PrivateKeyInfo or + oscrypto.asymmetric.PrivateKey, not %s + """, + _type_name(responder_private_key), + ) + ) cert_is_oscrypto = isinstance(responder_certificate, asymmetric.Certificate) if not isinstance(responder_certificate, x509.Certificate) and not cert_is_oscrypto: - raise TypeError(_pretty_message( - ''' - responder_certificate must be an instance of - asn1crypto.x509.Certificate or - oscrypto.asymmetric.Certificate, not %s - ''', - _type_name(responder_certificate) - )) + raise TypeError( + 
_pretty_message( + """ + responder_certificate must be an instance of + asn1crypto.x509.Certificate or + oscrypto.asymmetric.Certificate, not %s + """, + _type_name(responder_certificate), + ) + ) if cert_is_oscrypto: responder_certificate = responder_certificate.asn1 if self._certificate_status_list is None: - raise ValueError(_pretty_message( - ''' - certificate_status_list must be set if the response_status is - "successful" - ''' - )) + raise ValueError( + _pretty_message( + """ + certificate_status_list must be set if the response_status is + "successful" + """ + ) + ) def _make_extension(name, value): - return { - 'extn_id': name, - 'critical': False, - 'extn_value': value - } + return {'extn_id': name, 'critical': False, 'extn_value': value} responses = [] for serial, status in self._certificate_status_list: @@ -304,9 +311,7 @@ def _make_extension(name, value): for name, value in self._response_data_extensions.items(): response_data_extensions.append(_make_extension(name, value)) if self._nonce: - response_data_extensions.append( - _make_extension('nonce', self._nonce) - ) + response_data_extensions.append(_make_extension('nonce', self._nonce)) if not response_data_extensions: response_data_extensions = None @@ -318,12 +323,7 @@ def _make_extension(name, value): single_response_extensions.append( _make_extension( 'certificate_issuer', - [ - x509.GeneralName( - name='directory_name', - value=self._certificate_issuer.subject - ) - ] + [x509.GeneralName(name='directory_name', value=self._certificate_issuer.subject)], ) ) @@ -333,15 +333,9 @@ def _make_extension(name, value): responder_key_hash = getattr(responder_certificate.public_key, self._key_hash_algo) if status == 'good': - cert_status = ocsp.CertStatus( - name='good', - value=core.Null() - ) + cert_status = ocsp.CertStatus(name='good', value=core.Null()) elif status == 'unknown': - cert_status = ocsp.CertStatus( - name='unknown', - value=core.Null() - ) + cert_status = ocsp.CertStatus(name='unknown', 
value=core.Null()) else: reason = status if status != 'revoked' else 'unspecified' cert_status = ocsp.CertStatus( @@ -349,7 +343,7 @@ def _make_extension(name, value): value={ 'revocation_time': self._revocation_date, 'revocation_reason': reason, - } + }, ) issuer = self._certificate_issuer if self._certificate_issuer else responder_certificate @@ -363,27 +357,27 @@ def _make_extension(name, value): self._next_update = (self._this_update + timedelta(days=7)).replace(microsecond=0) response = { - 'cert_id': { - 'hash_algorithm': { - 'algorithm': self._key_hash_algo - }, - 'issuer_name_hash': getattr(issuer.subject, self._key_hash_algo), - 'issuer_key_hash': getattr(issuer.public_key, self._key_hash_algo), - 'serial_number': serial, - }, - 'cert_status': cert_status, - 'this_update': self._this_update, - 'next_update': self._next_update, - 'single_extensions': single_response_extensions - } + 'cert_id': { + 'hash_algorithm': {'algorithm': self._key_hash_algo}, + 'issuer_name_hash': getattr(issuer.subject, self._key_hash_algo), + 'issuer_key_hash': getattr(issuer.public_key, self._key_hash_algo), + 'serial_number': serial, + }, + 'cert_status': cert_status, + 'this_update': self._this_update, + 'next_update': self._next_update, + 'single_extensions': single_response_extensions, + } responses.append(response) - response_data = ocsp.ResponseData({ - 'responder_id': ocsp.ResponderId(name='by_key', value=responder_key_hash), - 'produced_at': produced_at, - 'responses': responses, - 'response_extensions': response_data_extensions - }) + response_data = ocsp.ResponseData( + { + 'responder_id': ocsp.ResponderId(name='by_key', value=responder_key_hash), + 'produced_at': produced_at, + 'responses': responses, + 'response_extensions': response_data_extensions, + } + ) signature_algo = responder_private_key.algorithm if signature_algo == 'ec': @@ -403,24 +397,31 @@ def _make_extension(name, value): signature_bytes = sign_func(responder_private_key, response_data.dump(), 
self._hash_algo) certs = None - if self._certificate_issuer and getattr(self._certificate_issuer.public_key, self._key_hash_algo) != responder_key_hash: + if ( + self._certificate_issuer + and getattr(self._certificate_issuer.public_key, self._key_hash_algo) != responder_key_hash + ): certs = [responder_certificate] - return ocsp.OCSPResponse({ - 'response_status': self._response_status, - 'response_bytes': { - 'response_type': 'basic_ocsp_response', - 'response': { - 'tbs_response_data': response_data, - 'signature_algorithm': {'algorithm': signature_algorithm_id}, - 'signature': signature_bytes, - 'certs': certs, - } + return ocsp.OCSPResponse( + { + 'response_status': self._response_status, + 'response_bytes': { + 'response_type': 'basic_ocsp_response', + 'response': { + 'tbs_response_data': response_data, + 'signature_algorithm': {'algorithm': signature_algorithm_id}, + 'signature': signature_bytes, + 'certs': certs, + }, + }, } - }) + ) + # Enums + class ResponseStatus(enum.Enum): successful = 'successful' malformed_request = 'malformed_request' @@ -445,14 +446,14 @@ class CertificateStatus(enum.Enum): # API endpoints -FAULT_REVOKED = "revoked" -FAULT_UNKNOWN = "unknown" +FAULT_REVOKED = 'revoked' +FAULT_UNKNOWN = 'unknown' app = Flask(__name__) -class OCSPResponder: - def __init__(self, issuer_cert: str, responder_cert: str, responder_key: str, - fault: str, next_update_seconds: int): + +class OCSPResponder: + def __init__(self, issuer_cert: str, responder_cert: str, responder_key: str, fault: str, next_update_seconds: int): """ Create a new OCSPResponder instance. 
@@ -525,11 +526,13 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: certificate_status_list = [(serial, certificate_status.value)] # Build the response - builder = OCSPResponseBuilder(**{ - 'response_status': ResponseStatus.successful.value, - 'certificate_status_list': certificate_status_list, - 'revocation_date': revocation_date, - }) + builder = OCSPResponseBuilder( + **{ + 'response_status': ResponseStatus.successful.value, + 'certificate_status_list': certificate_status_list, + 'revocation_date': revocation_date, + } + ) # Parse extensions for extension in tbs_request['request_extensions']: @@ -552,8 +555,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: # usually happen, according to RFC 6960 4.1.2), we should throw our # hands up in despair and run. if unknown is True and critical is True: - logger.warning('Could not parse unknown critical extension: %r', - dict(extension.native)) + logger.warning('Could not parse unknown critical extension: %r', dict(extension.native)) return self._fail(ResponseStatus.internal_error) # If it's an unknown non-critical extension, we can safely ignore it. 
@@ -579,18 +581,28 @@ def build_http_response(self, request_der: bytes) -> Response: responder = None + def init_responder(issuer_cert: str, responder_cert: str, responder_key: str, fault: str, next_update_seconds: int): global responder - responder = OCSPResponder(issuer_cert=issuer_cert, responder_cert=responder_cert, responder_key=responder_key, fault=fault, next_update_seconds=next_update_seconds) + responder = OCSPResponder( + issuer_cert=issuer_cert, + responder_cert=responder_cert, + responder_key=responder_key, + fault=fault, + next_update_seconds=next_update_seconds, + ) + def init(port=8080, debug=False): logger.info('Launching %sserver on port %d', 'debug' if debug else '', port) app.run(port=port, debug=debug) + @app.route('/', methods=['GET']) def _handle_root(): return 'ocsp-responder' + @app.route('/status/', defaults={'u_path': ''}, methods=['GET']) @app.route('/status/', methods=['GET']) def _handle_get(u_path): @@ -599,12 +611,13 @@ def _handle_get(u_path): An OCSP GET request contains the DER-in-base64 encoded OCSP request in the HTTP request URL. """ - if "Host" not in request.headers: - raise ValueError ("Required 'Host' header not present") + if 'Host' not in request.headers: + raise ValueError("Required 'Host' header not present") der = base64.b64decode(u_path) ocsp_request = responder.parse_ocsp_request(der) return responder.build_http_response(ocsp_request) + @app.route('/status', methods=['POST']) def _handle_post(): global responder @@ -612,7 +625,7 @@ def _handle_post(): An OCSP POST request contains the DER encoded OCSP request in the HTTP request body. 
""" - if "Host" not in request.headers: - raise ValueError ("Required 'Host' header not present") + if 'Host' not in request.headers: + raise ValueError("Required 'Host' header not present") ocsp_request = responder.parse_ocsp_request(request.data) return responder.build_http_response(ocsp_request) diff --git a/.evergreen/ocsp/ocsp_mock.py b/.evergreen/ocsp/ocsp_mock.py index d89b9d45b38..8ba302c09de 100755 --- a/.evergreen/ocsp/ocsp_mock.py +++ b/.evergreen/ocsp/ocsp_mock.py @@ -6,37 +6,52 @@ import argparse import logging -import sys import os +import sys -sys.path.append(os.path.join(os.getcwd() ,'src', 'third_party', 'mock_ocsp_responder')) +sys.path.append(os.path.join(os.getcwd(), 'src', 'third_party', 'mock_ocsp_responder')) import mock_ocsp_responder + def main(): """Main entry point""" - parser = argparse.ArgumentParser(description="MongoDB Mock OCSP Responder.") + parser = argparse.ArgumentParser(description='MongoDB Mock OCSP Responder.') - parser.add_argument('-p', '--port', type=int, default=8080, help="Port to listen on") + parser.add_argument('-p', '--port', type=int, default=8080, help='Port to listen on') - parser.add_argument('--ca_file', type=str, required=True, help="CA file for OCSP responder") + parser.add_argument('--ca_file', type=str, required=True, help='CA file for OCSP responder') - parser.add_argument('-v', '--verbose', action='count', help="Enable verbose tracing") + parser.add_argument('-v', '--verbose', action='count', help='Enable verbose tracing') - parser.add_argument('--ocsp_responder_cert', type=str, required=True, help="OCSP Responder Certificate") + parser.add_argument('--ocsp_responder_cert', type=str, required=True, help='OCSP Responder Certificate') - parser.add_argument('--ocsp_responder_key', type=str, required=True, help="OCSP Responder Keyfile") + parser.add_argument('--ocsp_responder_key', type=str, required=True, help='OCSP Responder Keyfile') - parser.add_argument('--fault', 
choices=[mock_ocsp_responder.FAULT_REVOKED, mock_ocsp_responder.FAULT_UNKNOWN, None], default=None, type=str, help="Specify a specific fault to test") + parser.add_argument( + '--fault', + choices=[mock_ocsp_responder.FAULT_REVOKED, mock_ocsp_responder.FAULT_UNKNOWN, None], + default=None, + type=str, + help='Specify a specific fault to test', + ) - parser.add_argument('--next_update_seconds', type=int, default=32400, help="Specify how long the OCSP response should be valid for") + parser.add_argument( + '--next_update_seconds', type=int, default=32400, help='Specify how long the OCSP response should be valid for' + ) args = parser.parse_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) print('Initializing OCSP Responder') - mock_ocsp_responder.init_responder(issuer_cert=args.ca_file, responder_cert=args.ocsp_responder_cert, responder_key=args.ocsp_responder_key, fault=args.fault, next_update_seconds=args.next_update_seconds) + mock_ocsp_responder.init_responder( + issuer_cert=args.ca_file, + responder_cert=args.ocsp_responder_cert, + responder_key=args.ocsp_responder_key, + fault=args.fault, + next_update_seconds=args.next_update_seconds, + ) if args.verbose: mock_ocsp_responder.init(args.port, debug=True) @@ -45,5 +60,6 @@ def main(): print('Mock OCSP Responder is running on port %s' % (str(args.port))) + if __name__ == '__main__': main() diff --git a/.evergreen/ocsp/rsa/mock-delegate-revoked.sh b/.evergreen/ocsp/rsa/mock-delegate-revoked.sh index adf026ce1b3..d7edc9b659b 100755 --- a/.evergreen/ocsp/rsa/mock-delegate-revoked.sh +++ b/.evergreen/ocsp/rsa/mock-delegate-revoked.sh @@ -3,6 +3,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp_responder.crt \ --ocsp_responder_key ocsp_responder.key \ - -p 8100 \ - -v \ - --fault revoked + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/rsa/mock-delegate-valid.sh b/.evergreen/ocsp/rsa/mock-delegate-valid.sh index 5074a7ecabd..5b0d33b4f94 100755 --- 
a/.evergreen/ocsp/rsa/mock-delegate-valid.sh +++ b/.evergreen/ocsp/rsa/mock-delegate-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp-responder.crt \ --ocsp_responder_key ocsp-responder.key \ - -p 8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/ocsp/rsa/mock-revoked.sh b/.evergreen/ocsp/rsa/mock-revoked.sh index 4a17926b922..209629fbc75 100755 --- a/.evergreen/ocsp/rsa/mock-revoked.sh +++ b/.evergreen/ocsp/rsa/mock-revoked.sh @@ -3,6 +3,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 8100 \ - -v \ - --fault revoked + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/rsa/mock-valid.sh b/.evergreen/ocsp/rsa/mock-valid.sh index c89ce9e954d..d015870e5a9 100755 --- a/.evergreen/ocsp/rsa/mock-valid.sh +++ b/.evergreen/ocsp/rsa/mock-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/scripts/build-docs.sh b/.evergreen/scripts/build-docs.sh index 850825286fb..40b5126a5c3 100755 --- a/.evergreen/scripts/build-docs.sh +++ b/.evergreen/scripts/build-docs.sh @@ -7,7 +7,7 @@ set -o errexit # Exit the script with error if any of the commands fail CMAKE=$(find_cmake_latest) # Check that a CLion user didn't accidentally convert NEWS from UTF-8 to ASCII -grep "á" NEWS > /dev/null || (echo "NEWS file appears to have lost its UTF-8 encoding?" || exit 1) +grep "á" NEWS >/dev/null || (echo "NEWS file appears to have lost its UTF-8 encoding?" 
|| exit 1) build_dir=$MONGOC_DIR/_build/for-docs "$CMAKE" --fresh -S "$MONGOC_DIR" -B "$build_dir" \ diff --git a/.evergreen/scripts/build_snapshot_rpm.sh b/.evergreen/scripts/build_snapshot_rpm.sh index 05b7f481260..2956e15daf9 100755 --- a/.evergreen/scripts/build_snapshot_rpm.sh +++ b/.evergreen/scripts/build_snapshot_rpm.sh @@ -22,7 +22,6 @@ set -o errexit # limitations under the License. # - for arg in "$@"; do if [ "$arg" = "-h" ]; then echo "Usage: .evergreen/scripts/build_snapshot_rpm.sh" @@ -58,7 +57,7 @@ fi if [ -f "${spec_file}" ]; then echo "Found old spec file (${spec_file})...removing" - rm -f ${spec_file} + rm -f ${spec_file} fi cp "$(pwd)/.evergreen/etc/${package}.spec" .. if [ -f .evergreen/etc/spec.patch ]; then @@ -119,13 +118,17 @@ git archive --format=tar --output="$tar_filepath" --prefix="$tar_filestem/" HEAD mkdir -p "$tar_filestem" cp VERSION_CURRENT "$tar_filestem/." tar -rf "$tar_filepath" "$tar_filestem/" -gzip --keep "$tar_filepath" --stdout > "$tgz_filepath" +gzip --keep "$tar_filepath" --stdout >"$tgz_filepath" echo "Building source RPM ..." rpmbuild -bs ${spec_file} echo "Building binary RPMs ..." mock_result=$(readlink -f ../mock-result) -sudo mock --resultdir="${mock_result}" --use-bootstrap-image --isolation=simple -r ${config} --no-clean --no-cleanup-after --rebuild ~/rpmbuild/SRPMS/${package}-${snapshot_version}*.src.rpm || ( cd "${mock_result}" ; cat *.log ; exit 1 ) +sudo mock --resultdir="${mock_result}" --use-bootstrap-image --isolation=simple -r ${config} --no-clean --no-cleanup-after --rebuild ~/rpmbuild/SRPMS/${package}-${snapshot_version}*.src.rpm || ( + cd "${mock_result}" + cat *.log + exit 1 +) sudo mock -r ${config} --use-bootstrap-image --isolation=simple --copyin "${mock_result}" /tmp sudo mock -r ${config} --use-bootstrap-image --isolation=simple --cwd "/tmp/${build_dir}" --chroot -- /bin/sh -c "( @@ -140,4 +143,7 @@ if [ ! 
-e "${mock_root}/tmp/${build_dir}/example-client" ]; then fi sudo mock -r ${config} --use-bootstrap-image --isolation=simple --clean -(cd "${mock_result}" ; tar zcvf ../rpm.tar.gz *.rpm) +( + cd "${mock_result}" + tar zcvf ../rpm.tar.gz *.rpm +) diff --git a/.evergreen/scripts/check-preludes.py b/.evergreen/scripts/check-preludes.py index 3b5807f3707..92901e70f32 100644 --- a/.evergreen/scripts/check-preludes.py +++ b/.evergreen/scripts/check-preludes.py @@ -14,56 +14,56 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Check that public libbson/libmongoc headers all include the prelude line. -""" +"""Check that public libbson/libmongoc headers all include the prelude line.""" + import sys from pathlib import Path if len(sys.argv) != 2: - print("Usage: python check-preludes.py ") + print('Usage: python check-preludes.py ') sys.exit(1) -MONGOC_PREFIX = Path("src/libmongoc/src/mongoc") -BSON_PREFIX = Path("src/libbson/src/bson") -COMMON_PREFIX = Path("src/common/src") +MONGOC_PREFIX = Path('src/libmongoc/src/mongoc') +BSON_PREFIX = Path('src/libbson/src/bson') +COMMON_PREFIX = Path('src/common/src') checks = [ { - "name": "libmongoc", - "headers": list(MONGOC_PREFIX.glob("mongoc-*.h")), - "exclusions": [ - MONGOC_PREFIX / "mongoc-prelude.h", - MONGOC_PREFIX / "mongoc.h", + 'name': 'libmongoc', + 'headers': list(MONGOC_PREFIX.glob('mongoc-*.h')), + 'exclusions': [ + MONGOC_PREFIX / 'mongoc-prelude.h', + MONGOC_PREFIX / 'mongoc.h', ], - "include": "#include ", + 'include': '#include ', }, { - "name": "libbson", - "headers": list(BSON_PREFIX.glob("bson-*.h")), - "exclusions": [ - BSON_PREFIX / "bson-prelude.h", - BSON_PREFIX / "bson.h", + 'name': 'libbson', + 'headers': list(BSON_PREFIX.glob('bson-*.h')), + 'exclusions': [ + BSON_PREFIX / 'bson-prelude.h', + BSON_PREFIX / 'bson.h', ], - "include": "#include ", + 'include': '#include ', }, { - "name": "common", - "headers": list(COMMON_PREFIX.glob("*.h")), - 
"exclusions": [COMMON_PREFIX / "common-prelude.h"], - "include": "#include ", + 'name': 'common', + 'headers': list(COMMON_PREFIX.glob('*.h')), + 'exclusions': [COMMON_PREFIX / 'common-prelude.h'], + 'include': '#include ', }, ] for check in checks: - NAME = check["name"] - print(f"Checking headers for {NAME}") - assert len(check["headers"]) > 0 - for header in check["headers"]: - if header in check["exclusions"] or header.name.endswith("-private.h"): + NAME = check['name'] + print(f'Checking headers for {NAME}') + assert len(check['headers']) > 0 + for header in check['headers']: + if header in check['exclusions'] or header.name.endswith('-private.h'): continue - lines = Path(header).read_text(encoding="utf-8").splitlines() - if check["include"] not in lines: - print(f"{header} did not include prelude") + lines = Path(header).read_text(encoding='utf-8').splitlines() + if check['include'] not in lines: + print(f'{header} did not include prelude') sys.exit(1) -print("All checks passed") +print('All checks passed') diff --git a/.evergreen/scripts/check-symlink.sh b/.evergreen/scripts/check-symlink.sh index 97ca241014c..034a4c3e53e 100755 --- a/.evergreen/scripts/check-symlink.sh +++ b/.evergreen/scripts/check-symlink.sh @@ -1,5 +1,4 @@ -check_symlink() -{ +check_symlink() { SYMLINK="$INSTALL_DIR/lib/$1" EXPECTED_TARGET="$2" diff --git a/.evergreen/scripts/check_rpm_spec.sh b/.evergreen/scripts/check_rpm_spec.sh index a669842c24c..6d67c47b8ff 100755 --- a/.evergreen/scripts/check_rpm_spec.sh +++ b/.evergreen/scripts/check_rpm_spec.sh @@ -24,8 +24,7 @@ set -o errexit # Supported/used environment variables: # IS_PATCH If "true", this is an Evergreen patch build. 
- -on_exit () { +on_exit() { if [ -n "${SPEC_FILE}" ]; then rm -f "${SPEC_FILE}" fi @@ -33,11 +32,14 @@ on_exit () { trap on_exit EXIT if [ "${IS_PATCH}" = "true" ]; then - echo "This is a patch build...skipping RPM spec check" - exit + echo "This is a patch build...skipping RPM spec check" + exit fi SPEC_FILE=$(mktemp --tmpdir -u mongo-c-driver.XXXXXXXX.spec) curl --retry 5 https://src.fedoraproject.org/rpms/mongo-c-driver/raw/rawhide/f/mongo-c-driver.spec -sS --max-time 120 --fail --output "${SPEC_FILE}" -diff -q .evergreen/etc/mongo-c-driver.spec "${SPEC_FILE}" || (echo "Synchronize RPM spec from downstream to fix this failure. See instructions here: https://docs.google.com/document/d/1ItyBC7VN383zNXu3oUOQJYR7adfYI8ECjLMJ5kqA9X8/edit#heading=h.ahdrr3b5xv3"; exit 1) +diff -q .evergreen/etc/mongo-c-driver.spec "${SPEC_FILE}" || ( + echo "Synchronize RPM spec from downstream to fix this failure. See instructions here: https://docs.google.com/document/d/1ItyBC7VN383zNXu3oUOQJYR7adfYI8ECjLMJ5kqA9X8/edit#heading=h.ahdrr3b5xv3" + exit 1 +) diff --git a/.evergreen/scripts/compile-libmongocrypt.sh b/.evergreen/scripts/compile-libmongocrypt.sh index e719fafbbee..1debf6c95e5 100755 --- a/.evergreen/scripts/compile-libmongocrypt.sh +++ b/.evergreen/scripts/compile-libmongocrypt.sh @@ -1,9 +1,12 @@ #!/usr/bin/env bash compile_libmongocrypt() { - declare -r cmake_binary="${1:?"missing path to CMake binary"}"; shift - declare -r mongoc_dir="${1:?"missing path to mongoc directory"}"; shift - declare -r install_dir="${1:?"missing path to install directory"}"; shift + declare -r cmake_binary="${1:?"missing path to CMake binary"}" + shift + declare -r mongoc_dir="${1:?"missing path to mongoc directory"}" + shift + declare -r install_dir="${1:?"missing path to install directory"}" + shift # When updating libmongocrypt, also update openssl-compat-check.sh and the copy of libmongocrypt's kms-message in # `src/kms-message`. 
diff --git a/.evergreen/scripts/compile-windows.sh b/.evergreen/scripts/compile-windows.sh index 9a0bdabd3a2..91de0679239 100755 --- a/.evergreen/scripts/compile-windows.sh +++ b/.evergreen/scripts/compile-windows.sh @@ -73,7 +73,7 @@ fi configure_flags_append "-DCMAKE_BUILD_TYPE=${build_config:?}" configure_flags_append "-DENABLE_SSL=${SSL:-}" - # shellcheck source=.evergreen/scripts/find-cmake-version.sh +# shellcheck source=.evergreen/scripts/find-cmake-version.sh . "${script_dir}/find-cmake-latest.sh" declare cmake_binary cmake_binary="$(find_cmake_latest)" diff --git a/.evergreen/scripts/debian_package_build.sh b/.evergreen/scripts/debian_package_build.sh index 9c54520600e..d1bda19cac1 100755 --- a/.evergreen/scripts/debian_package_build.sh +++ b/.evergreen/scripts/debian_package_build.sh @@ -8,7 +8,7 @@ set -o errexit -on_exit () { +on_exit() { if [ -e ./unstable-chroot/debootstrap/debootstrap.log ]; then echo "Dumping debootstrap.log (64-bit)" cat ./unstable-chroot/debootstrap/debootstrap.log @@ -24,7 +24,7 @@ git config user.email "evergreen-build@example.com" git config user.name "Evergreen Build" if [ "${IS_PATCH}" = "true" ]; then - git diff HEAD > ../upstream.patch + git diff HEAD >../upstream.patch git clean -fdx git reset --hard HEAD git remote add upstream https://github.com/mongodb/mongo-c-driver @@ -36,7 +36,7 @@ if [ "${IS_PATCH}" = "true" ]; then if [ -s ../upstream.patch ]; then [ -d debian/patches ] || mkdir debian/patches mv ../upstream.patch debian/patches/ - echo upstream.patch >> debian/patches/series + echo upstream.patch >>debian/patches/series git add debian/patches/* git commit -m 'Evergreen patch build - upstream changes' git log -n1 -p @@ -46,7 +46,7 @@ fi cd .. 
git clone https://salsa.debian.org/installer-team/debootstrap.git debootstrap.git -export DEBOOTSTRAP_DIR=`pwd`/debootstrap.git +export DEBOOTSTRAP_DIR=$(pwd)/debootstrap.git sudo -E ./debootstrap.git/debootstrap --variant=buildd unstable ./unstable-chroot/ http://cdn-aws.deb.debian.org/debian cp -a mongoc ./unstable-chroot/tmp/ sudo chroot ./unstable-chroot /bin/bash -c '(\ @@ -68,8 +68,14 @@ sudo chroot ./unstable-chroot /bin/bash -c '(\ dpkg -i ../*.deb && \ gcc $(pkgconf --cflags bson2 mongoc2) -o example-client src/libmongoc/examples/example-client.c -lmongoc2 -lbson2 )' -[ -e ./unstable-chroot/tmp/mongoc/example-client ] || (echo "Example was not built!" ; exit 1) -(cd ./unstable-chroot/tmp/ ; tar zcvf ../../deb.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb) +[ -e ./unstable-chroot/tmp/mongoc/example-client ] || ( + echo "Example was not built!" + exit 1 +) +( + cd ./unstable-chroot/tmp/ + tar zcvf ../../deb.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb +) # Build a second time, to ensure a "double build" works sudo chroot ./unstable-chroot /bin/bash -c "(\ @@ -100,8 +106,14 @@ sudo chroot ./unstable-i386-chroot /bin/bash -c '(\ dpkg -i ../*.deb && \ gcc $(pkgconf --cflags bson2 mongoc2) -o example-client src/libmongoc/examples/example-client.c -lmongoc2 -lbson2 )' -[ -e ./unstable-i386-chroot/tmp/mongoc/example-client ] || (echo "Example was not built!" ; exit 1) -(cd ./unstable-i386-chroot/tmp/ ; tar zcvf ../../deb-i386.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb) +[ -e ./unstable-i386-chroot/tmp/mongoc/example-client ] || ( + echo "Example was not built!" 
+ exit 1 +) +( + cd ./unstable-i386-chroot/tmp/ + tar zcvf ../../deb-i386.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb +) # Build a second time, to ensure a "double build" works sudo chroot ./unstable-i386-chroot /bin/bash -c "(\ diff --git a/.evergreen/scripts/debug-core-evergreen.sh b/.evergreen/scripts/debug-core-evergreen.sh index de0be7c542a..3b4b1336108 100755 --- a/.evergreen/scripts/debug-core-evergreen.sh +++ b/.evergreen/scripts/debug-core-evergreen.sh @@ -1,21 +1,21 @@ #!/usr/bin/env bash if [[ "${OSTYPE}" == "cygwin" ]]; then - echo "Skipping debug-core-evergreen.sh" - exit + echo "Skipping debug-core-evergreen.sh" + exit fi shopt -s nullglob for i in *.core; do - echo $i - echo "backtrace full" | gdb -q ./cmake-build/src/libmongoc/test-libmongoc $i + echo $i + echo "backtrace full" | gdb -q ./cmake-build/src/libmongoc/test-libmongoc $i done # If there is still a test-libmongoc process running (perhaps due to # deadlock, or very slow test) attach a debugger and print stacks. TEST_LIBMONGOC_PID="$(pgrep test-libmongoc)" if [ -n "$TEST_LIBMONGOC_PID" ]; then - echo "test-libmongoc processes still running with PID=$TEST_LIBMONGOC_PID" - echo "backtrace full" | gdb -q -p $TEST_LIBMONGOC_PID - kill $TEST_LIBMONGOC_PID + echo "test-libmongoc processes still running with PID=$TEST_LIBMONGOC_PID" + echo "backtrace full" | gdb -q -p $TEST_LIBMONGOC_PID + kill $TEST_LIBMONGOC_PID fi diff --git a/.evergreen/scripts/integration-tests.sh b/.evergreen/scripts/integration-tests.sh index 4ea2d5effb3..16b307ef2d9 100755 --- a/.evergreen/scripts/integration-tests.sh +++ b/.evergreen/scripts/integration-tests.sh @@ -16,7 +16,7 @@ # This script may be run locally. # -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # shellcheck source=.evergreen/scripts/env-var-utils.sh . 
"$(dirname "${BASH_SOURCE[0]:?}")/env-var-utils.sh" @@ -37,7 +37,7 @@ script_dir="$(to_absolute "$(dirname "${BASH_SOURCE[0]:?}")")" export DRIVERS_TOOLS DRIVERS_TOOLS="$(cd ../drivers-evergreen-tools && pwd)" # ./mongoc -> ./drivers-evergreen-tools if [[ "${OSTYPE:?}" == cygwin ]]; then - DRIVERS_TOOLS="$(cygpath -m "${DRIVERS_TOOLS:?}")" + DRIVERS_TOOLS="$(cygpath -m "${DRIVERS_TOOLS:?}")" fi export MONGO_ORCHESTRATION_HOME="${DRIVERS_TOOLS:?}/.evergreen/orchestration" @@ -46,12 +46,12 @@ export PATH="${MONGODB_BINARIES:?}:$PATH" # Workaround absence of `tls=true` URI in the `mongodb_auth_uri` field returned by mongo orchestration. if [[ -n "${REQUIRE_API_VERSION:-}" && "${SSL:?}" != nossl ]]; then - prev='$MONGODB_BINARIES/mongosh $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' + prev='$MONGODB_BINARIES/mongosh $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' - # Use `--tlsAllowInvalidCertificates` to avoid self-signed certificate errors. - next='$MONGODB_BINARIES/mongosh --tls --tlsAllowInvalidCertificates $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' + # Use `--tlsAllowInvalidCertificates` to avoid self-signed certificate errors. + next='$MONGODB_BINARIES/mongosh --tls --tlsAllowInvalidCertificates $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' - sed -i -e "s|${prev:?}|${next:?}|" "${DRIVERS_TOOLS:?}/.evergreen/run-orchestration.sh" + sed -i -e "s|${prev:?}|${next:?}|" "${DRIVERS_TOOLS:?}/.evergreen/run-orchestration.sh" fi "${DRIVERS_TOOLS:?}/.evergreen/run-orchestration.sh" @@ -60,19 +60,19 @@ echo "Waiting for mongo-orchestration to start..." wait_for_mongo_orchestration() { declare port="${1:?"wait_for_mongo_orchestration requires a server port"}" - for _ in $(seq 300); do - # Exit code 7: "Failed to connect to host". - if - curl -s --max-time 1 "localhost:${port:?}" >/dev/null - test $? 
-ne 7 - then - return 0 - else - sleep 1 - fi - done - echo "Could not detect mongo-orchestration on port ${port:?}" - return 1 + for _ in $(seq 300); do + # Exit code 7: "Failed to connect to host". + if + curl -s --max-time 1 "localhost:${port:?}" >/dev/null + test $? -ne 7 + then + return 0 + else + sleep 1 + fi + done + echo "Could not detect mongo-orchestration on port ${port:?}" + return 1 } wait_for_mongo_orchestration 8889 echo "Waiting for mongo-orchestration to start... done." diff --git a/.evergreen/scripts/kms-divergence-check.sh b/.evergreen/scripts/kms-divergence-check.sh index 46f4adaafc4..e2a940ed230 100755 --- a/.evergreen/scripts/kms-divergence-check.sh +++ b/.evergreen/scripts/kms-divergence-check.sh @@ -16,9 +16,9 @@ LIBMONGOCRYPT_DIR="$MONGOC_DIR/libmongocrypt-for-kms-divergence-check" LIBMONGOCRYPT_GITREF="34a9572c416e0827a1fa988baf88411c4b5f2c7b" cleanup() { - if [ -d "$LIBMONGOCRYPT_DIR" ]; then - rm -rf "$LIBMONGOCRYPT_DIR" - fi + if [ -d "$LIBMONGOCRYPT_DIR" ]; then + rm -rf "$LIBMONGOCRYPT_DIR" + fi } cleanup @@ -27,9 +27,9 @@ trap cleanup EXIT git clone -q https://github.com/mongodb/libmongocrypt "$LIBMONGOCRYPT_DIR" cd "$LIBMONGOCRYPT_DIR" git checkout "$LIBMONGOCRYPT_GITREF" --quiet -if ! diff -uNr "$LIBMONGOCRYPT_DIR/kms-message/" "$MONGOC_DIR/src/kms-message/" ; then - echo "Unexpected differences found in KMS sources!" - exit 1 +if ! diff -uNr "$LIBMONGOCRYPT_DIR/kms-message/" "$MONGOC_DIR/src/kms-message/"; then + echo "Unexpected differences found in KMS sources!" 
+ exit 1 else - echo "No changes detected from KMS message at commit $LIBMONGOCRYPT_GITREF" + echo "No changes detected from KMS message at commit $LIBMONGOCRYPT_GITREF" fi diff --git a/.evergreen/scripts/link-sample-program-bson.sh b/.evergreen/scripts/link-sample-program-bson.sh index c17230c9b84..1d68608092b 100755 --- a/.evergreen/scripts/link-sample-program-bson.sh +++ b/.evergreen/scripts/link-sample-program-bson.sh @@ -1,11 +1,10 @@ #!/usr/bin/env bash -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: # LINK_STATIC Whether to statically link to libbson # BUILD_SAMPLE_WITH_CMAKE Link program w/ CMake. Default: use pkg-config. - echo "LINK_STATIC=$LINK_STATIC BUILD_SAMPLE_WITH_CMAKE=$BUILD_SAMPLE_WITH_CMAKE" DIR=$(dirname $0) @@ -15,8 +14,8 @@ CMAKE=$(find_cmake_latest) # The major version of the project. Appears in certain install filenames. _full_version=$(cat "$DIR/../../VERSION_CURRENT") -version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 -major="${version%%.*}" # 1.2.3 → 1 +version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 +major="${version%%.*}" # 1.2.3 → 1 echo "major version: $major" echo " full version: $version" @@ -30,7 +29,7 @@ else LDD=ldd fi -SRCROOT=`pwd` +SRCROOT=$(pwd) SCRATCH_DIR=$(pwd)/.scratch rm -rf "$SCRATCH_DIR" mkdir -p "$SCRATCH_DIR" diff --git a/.evergreen/scripts/link-sample-program.sh b/.evergreen/scripts/link-sample-program.sh index 52d4c0f1e60..0d61cf10389 100755 --- a/.evergreen/scripts/link-sample-program.sh +++ b/.evergreen/scripts/link-sample-program.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: # LINK_STATIC Whether to statically link to libmongoc @@ -8,7 +8,6 @@ set -o errexit # Exit the script with error if any of 
the commands fail # ENABLE_SNAPPY Set -DENABLE_SNAPPY # CMAKE Path to cmake executable. - echo "LINK_STATIC=$LINK_STATIC BUILD_SAMPLE_WITH_CMAKE=$BUILD_SAMPLE_WITH_CMAKE" DIR=$(dirname $0) @@ -18,8 +17,8 @@ CMAKE=$(find_cmake_latest) # The major version of the project. Appears in certain install filenames. _full_version=$(cat "$DIR/../../VERSION_CURRENT") -version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 -major="${version%%.*}" # 1.2.3 → 1 +version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 +major="${version%%.*}" # 1.2.3 → 1 echo "major version: $major" echo " full version: $version" @@ -33,7 +32,7 @@ else LDD=ldd fi -SRCROOT=`pwd` +SRCROOT=$(pwd) SCRATCH_DIR=$(pwd)/.scratch rm -rf "$SCRATCH_DIR" mkdir -p "$SCRATCH_DIR" @@ -57,15 +56,14 @@ fi if [ "$ENABLE_SSL" ]; then if [ "$OS" = "darwin" ]; then - SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=DARWIN" + SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=DARWIN" else - SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=OPENSSL" + SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=OPENSSL" fi else SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=OFF" fi - if [ "$LINK_STATIC" ]; then STATIC_CMAKE_OPTION="-DENABLE_STATIC=ON -DENABLE_TESTS=ON" else diff --git a/.evergreen/scripts/man-pages-to-html.sh b/.evergreen/scripts/man-pages-to-html.sh index e7e5b5fdf80..df6d7ea8ac3 100755 --- a/.evergreen/scripts/man-pages-to-html.sh +++ b/.evergreen/scripts/man-pages-to-html.sh @@ -38,7 +38,7 @@ echo "
    "
     
     for doc in $2/*.3; do
    -  fullpath=`pwd`/$doc
    +  fullpath=$(pwd)/$doc
       name=$(basename $doc)
     
       if [ ! -e "$fullpath" ]; then
    diff --git a/.evergreen/scripts/openssl-compat-setup.sh b/.evergreen/scripts/openssl-compat-setup.sh
    index f009102241e..76cde08ae7e 100755
    --- a/.evergreen/scripts/openssl-compat-setup.sh
    +++ b/.evergreen/scripts/openssl-compat-setup.sh
    @@ -25,14 +25,14 @@ openssl_install_dir="${mongoc_dir:?}/openssl-install-dir"
     openssl_download "${OPENSSL_VERSION:?}"
     
     rm -rf "${openssl_install_dir:?}"
    -mkdir  "${openssl_install_dir:?}" # For openssl.cnf.
    +mkdir "${openssl_install_dir:?}" # For openssl.cnf.
     
     declare -a config_flags=(
       "--prefix=${openssl_install_dir:?}"
       "--openssldir=${openssl_install_dir:?}/ssl"
       "--libdir=lib"
    -  "shared" # Enable shared libraries.
    -  "-fPIC" # For static libraries.
    +  "shared"                              # Enable shared libraries.
    +  "-fPIC"                               # For static libraries.
       "-Wl,-rpath,${openssl_install_dir:?}" # For shared libraries.
     )
     
    @@ -79,9 +79,9 @@ echo "Building and installing OpenSSL ${OPENSSL_VERSION:?}..."
           PATH="${openssl_install_dir:?}/bin:${PATH:-}" \
           LD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${LD_LIBRARY_PATH:-}" \
           openssl fipsinstall \
    -        -out "${openssl_install_dir:?}/ssl/fipsmodule.cnf" \
    -        -module "${openssl_install_dir:?}/lib/ossl-modules/fips.so" \
    -        -quiet
    +      -out "${openssl_install_dir:?}/ssl/fipsmodule.cnf" \
    +      -module "${openssl_install_dir:?}/lib/ossl-modules/fips.so" \
    +      -quiet
     
         # Verification.
         echo "Verifying OpenSSL FIPS 3.0 module is enabled..."
    diff --git a/.evergreen/scripts/run-auth-tests.sh b/.evergreen/scripts/run-auth-tests.sh
    index df473f3251b..ed39e6252bd 100755
    --- a/.evergreen/scripts/run-auth-tests.sh
    +++ b/.evergreen/scripts/run-auth-tests.sh
    @@ -27,36 +27,36 @@ chmod 700 "${secrets_dir:?}"
     
     # Create certificate to test X509 auth with Atlas on cloud-prod:
     atlas_x509_path="${secrets_dir:?}/atlas_x509.pem"
    -echo "${atlas_x509_cert_base64:?}" | base64 --decode > "${secrets_dir:?}/atlas_x509.pem"
    +echo "${atlas_x509_cert_base64:?}" | base64 --decode >"${secrets_dir:?}/atlas_x509.pem"
     # Fix path on Windows:
     if $IS_WINDOWS; then
    -    atlas_x509_path="$(cygpath -m "${atlas_x509_path:?}")"
    +  atlas_x509_path="$(cygpath -m "${atlas_x509_path:?}")"
     fi
     
     # Create certificate to test X509 auth with Atlas on cloud-dev
     atlas_x509_dev_path="${secrets_dir:?}/atlas_x509_dev.pem"
    -echo "${atlas_x509_dev_cert_base64:?}" | base64 --decode > "${atlas_x509_dev_path:?}"
    +echo "${atlas_x509_dev_cert_base64:?}" | base64 --decode >"${atlas_x509_dev_path:?}"
     # Fix path on Windows:
     if $IS_WINDOWS; then
    -    atlas_x509_dev_path="$(cygpath -m "${atlas_x509_dev_path}")"
    +  atlas_x509_dev_path="$(cygpath -m "${atlas_x509_dev_path}")"
     fi
     
     # Create Kerberos config and keytab files.
     echo "Setting up Kerberos ... begin"
     if command -v kinit >/dev/null; then
    -    # Copy host config and append realm:
    -    if [ -e /etc/krb5.conf ]; then
    -      cat /etc/krb5.conf > "${secrets_dir:?}/krb5.conf"
    -    fi
    -    cat "${mongoc_dir:?}/.evergreen/etc/kerberos.realm" >> "${secrets_dir:?}/krb5.conf"
    -    # Set up keytab:
    -    echo "${keytab:?}" | base64 --decode > "${secrets_dir:?}/drivers.keytab"
    -    # Initialize kerberos:
    -    KRB5_CONFIG="${secrets_dir:?}/krb5.conf" kinit -k -t "${secrets_dir:?}/drivers.keytab" -p drivers@LDAPTEST.10GEN.CC
    -    echo "Setting up Kerberos ... done"
    +  # Copy host config and append realm:
    +  if [ -e /etc/krb5.conf ]; then
    +    cat /etc/krb5.conf >"${secrets_dir:?}/krb5.conf"
    +  fi
    +  cat "${mongoc_dir:?}/.evergreen/etc/kerberos.realm" >>"${secrets_dir:?}/krb5.conf"
    +  # Set up keytab:
    +  echo "${keytab:?}" | base64 --decode >"${secrets_dir:?}/drivers.keytab"
    +  # Initialize kerberos:
    +  KRB5_CONFIG="${secrets_dir:?}/krb5.conf" kinit -k -t "${secrets_dir:?}/drivers.keytab" -p drivers@LDAPTEST.10GEN.CC
    +  echo "Setting up Kerberos ... done"
     else
    -    echo "No 'kinit' detected"
    -    echo "Setting up Kerberos ... skipping"
    +  echo "No 'kinit' detected"
    +  echo "Setting up Kerberos ... skipping"
     fi
     
     declare c_timeout="connectTimeoutMS=30000&serverSelectionTryOnce=false"
    @@ -178,8 +178,8 @@ if [[ "${ssl}" != "OFF" ]]; then
         # Windows Server 2008 hosts do not appear to share TLS 1.2 cipher suites with Atlas Serverless.
         WINDOWS_OSNAME="$(systeminfo | grep 'OS Name:' | awk -F ':' '{print $2}')"
         if [[ "${WINDOWS_OSNAME}" == *"Windows Server 2008"* ]]; then
    -        echo "Detected Windows Server 2008 ... skipping Atlas Serverless test due to no shared cipher suites."
    -        HAS_CIPHERSUITES_FOR_SERVERLESS="NO"
    +      echo "Detected Windows Server 2008 ... skipping Atlas Serverless test due to no shared cipher suites."
    +      HAS_CIPHERSUITES_FOR_SERVERLESS="NO"
         fi
       fi
       if [[ "${HAS_CIPHERSUITES_FOR_SERVERLESS}" == "YES" ]]; then
    diff --git a/.evergreen/scripts/run-aws-tests.sh b/.evergreen/scripts/run-aws-tests.sh
    index a77300fe36b..82423f99bd3 100755
    --- a/.evergreen/scripts/run-aws-tests.sh
    +++ b/.evergreen/scripts/run-aws-tests.sh
    @@ -8,7 +8,6 @@
     # Example:
     # TESTCASE=EC2 run-aws-tests.sh
     
    -
     set -o errexit
     set -o pipefail
     
    @@ -51,7 +50,6 @@ expect_failure() {
       "${test_awsauth:?}" "${1:?}" "EXPECT_FAILURE" || exit
     }
     
    -
     if [[ "${TESTCASE:?}" == "REGULAR" ]]; then
       echo "===== Testing regular auth via URI ====="
     
    @@ -148,29 +146,29 @@ if [[ "${TESTCASE:?}" == "ASSUME_ROLE_WITH_WEB_IDENTITY" ]]; then
     
       echo "Valid credentials via Web Identity - should succeed"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
         expect_success "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Valid credentials via Web Identity with session name - should succeed"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    -  AWS_ROLE_SESSION_NAME=test \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_ROLE_SESSION_NAME=test \
         expect_success "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Invalid AWS_ROLE_ARN via Web Identity with session name - should fail"
       AWS_ROLE_ARN="invalid_role_arn" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
         expect_failure "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Invalid AWS_WEB_IDENTITY_TOKEN_FILE via Web Identity with session name - should fail"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="/invalid/path" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="/invalid/path" \
         expect_failure "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Invalid AWS_ROLE_SESSION_NAME via Web Identity with session name - should fail"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    -  AWS_ROLE_SESSION_NAME="contains_invalid_character_^" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_ROLE_SESSION_NAME="contains_invalid_character_^" \
         expect_failure "mongodb://localhost/?authMechanism=MONGODB-AWS"
       exit
     fi
    diff --git a/.evergreen/scripts/run-ocsp-test.sh b/.evergreen/scripts/run-ocsp-test.sh
    index 7396170c7af..2a5cb29600b 100755
    --- a/.evergreen/scripts/run-ocsp-test.sh
    +++ b/.evergreen/scripts/run-ocsp-test.sh
    @@ -90,14 +90,14 @@ command -V "${mongoc_ping:?}"
     # Custom OpenSSL library may be installed. Only prepend to LD_LIBRARY_PATH when
     # necessary to avoid conflicting with system binary requirements.
     if [[ -d "${openssl_install_dir:?}" ]]; then
    -    if [[ -d "${openssl_install_dir:?}/lib64" ]]; then
    -        LD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${LD_LIBRARY_PATH:-}"
    -        DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${DYLD_LIBRARY_PATH:-}"
    -    else
    -        LD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${LD_LIBRARY_PATH:-}"
    -        DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${DYLD_LIBRARY_PATH:-}"
    -    fi
    -    export LD_LIBRARY_PATH DYLD_LIBRARY_PATH
    +  if [[ -d "${openssl_install_dir:?}/lib64" ]]; then
    +    LD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${LD_LIBRARY_PATH:-}"
    +    DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${DYLD_LIBRARY_PATH:-}"
    +  else
    +    LD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${LD_LIBRARY_PATH:-}"
    +    DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${DYLD_LIBRARY_PATH:-}"
    +  fi
    +  export LD_LIBRARY_PATH DYLD_LIBRARY_PATH
     fi
     
     expect_success() {
    diff --git a/.evergreen/scripts/run-tests.sh b/.evergreen/scripts/run-tests.sh
    index a52e8c89eeb..0d705fe536b 100755
    --- a/.evergreen/scripts/run-tests.sh
    +++ b/.evergreen/scripts/run-tests.sh
    @@ -111,9 +111,9 @@ fi
     # Sanitizer environment variables.
     export ASAN_OPTIONS="detect_leaks=1 abort_on_error=1 symbolize=1"
     export ASAN_SYMBOLIZER_PATH
    -if command -v "/opt/mongodbtoolchain/v4/bin/llvm-symbolizer" > /dev/null; then
    +if command -v "/opt/mongodbtoolchain/v4/bin/llvm-symbolizer" >/dev/null; then
       ASAN_SYMBOLIZER_PATH="/opt/mongodbtoolchain/v4/bin/llvm-symbolizer"
    -elif command -v "/opt/mongodbtoolchain/v3/bin/llvm-symbolizer" > /dev/null; then
    +elif command -v "/opt/mongodbtoolchain/v3/bin/llvm-symbolizer" >/dev/null; then
       ASAN_SYMBOLIZER_PATH="/opt/mongodbtoolchain/v3/bin/llvm-symbolizer"
     fi
     export TSAN_OPTIONS="suppressions=.tsan-suppressions"
    diff --git a/.evergreen/scripts/simple_http_server.py b/.evergreen/scripts/simple_http_server.py
    index b83276afa37..482762d293b 100755
    --- a/.evergreen/scripts/simple_http_server.py
    +++ b/.evergreen/scripts/simple_http_server.py
    @@ -11,20 +11,17 @@ def do_GET(self):
             self.send_response(http.HTTPStatus.OK)
             self.send_header('Content-type', 'text/html')
             self.end_headers()
    -        self.wfile.write(
    -            'Response to GET by simple HTTP server'.encode('utf-8'))
    +        self.wfile.write('Response to GET by simple HTTP server'.encode('utf-8'))
     
         def do_POST(self):
             self.send_response(http.HTTPStatus.OK)
             self.send_header('Content-type', 'text/html')
             self.end_headers()
    -        self.wfile.write(
    -            'Response to POST by simple HTTP server'.encode('utf-8'))
    +        self.wfile.write('Response to POST by simple HTTP server'.encode('utf-8'))
     
     
     def main():
    -    HTTPServer(server_address=('', 18000),
    -               RequestHandlerClass=Simple).serve_forever()
    +    HTTPServer(server_address=('', 18000), RequestHandlerClass=Simple).serve_forever()
     
     
     if __name__ == '__main__':
    diff --git a/build/bottle.py b/build/bottle.py
    index cb7dd208320..84e5729ffca 100644
    --- a/build/bottle.py
    +++ b/build/bottle.py
    @@ -14,6 +14,7 @@
     """
     
     from __future__ import print_function
    +
     import sys
     
     __author__ = 'Marcel Hellkamp'
    @@ -31,18 +32,16 @@
     def _cli_parse(args):  # pragma: no coverage
         from argparse import ArgumentParser
     
    -    parser = ArgumentParser(prog=args[0], usage="%(prog)s [options] package.module:app")
    +    parser = ArgumentParser(prog=args[0], usage='%(prog)s [options] package.module:app')
         opt = parser.add_argument
    -    opt("--version", action="store_true", help="show version number.")
    -    opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
    -    opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
    -    opt("-p", "--plugin", action="append", help="install additional plugin/s.")
    -    opt("-c", "--conf", action="append", metavar="FILE",
    -        help="load config values from FILE.")
    -    opt("-C", "--param", action="append", metavar="NAME=VALUE",
    -        help="override config values.")
    -    opt("--debug", action="store_true", help="start server in debug mode.")
    -    opt("--reload", action="store_true", help="auto-reload on file changes.")
    +    opt('--version', action='store_true', help='show version number.')
    +    opt('-b', '--bind', metavar='ADDRESS', help='bind socket to ADDRESS.')
    +    opt('-s', '--server', default='wsgiref', help='use SERVER as backend.')
    +    opt('-p', '--plugin', action='append', help='install additional plugin/s.')
    +    opt('-c', '--conf', action='append', metavar='FILE', help='load config values from FILE.')
    +    opt('-C', '--param', action='append', metavar='NAME=VALUE', help='override config values.')
    +    opt('--debug', action='store_true', help='start server in debug mode.')
    +    opt('--reload', action='store_true', help='auto-reload on file changes.')
         opt('app', help='WSGI app entry point.', nargs='?')
     
         cli_args = parser.parse_args(args[1:])
    @@ -56,9 +55,11 @@ def _cli_patch(cli_args):  # pragma: no coverage
         if opts.server:
             if opts.server.startswith('gevent'):
                 import gevent.monkey
    +
                 gevent.monkey.patch_all()
             elif opts.server.startswith('eventlet'):
                 import eventlet
    +
                 eventlet.monkey_patch()
     
     
    @@ -69,41 +70,61 @@ def _cli_patch(cli_args):  # pragma: no coverage
     # Imports and Python 2/3 unification ##########################################
     ###############################################################################
     
    -import base64, calendar, email.utils, functools, hmac, itertools,\
    -       mimetypes, os, re, tempfile, threading, time, warnings, weakref, hashlib
    -
    -from types import FunctionType
    -from datetime import date as datedate, datetime, timedelta
    +import base64
    +import calendar
    +import email.utils
    +import functools
    +import hashlib
    +import hmac
    +import itertools
    +import mimetypes
    +import os
    +import re
    +import tempfile
    +import threading
    +import time
    +import warnings
    +import weakref
    +from datetime import date as datedate
    +from datetime import datetime, timedelta
     from tempfile import NamedTemporaryFile
     from traceback import format_exc, print_exc
    +from types import FunctionType
     from unicodedata import normalize
     
     try:
    -    from ujson import dumps as json_dumps, loads as json_lds
    +    from ujson import dumps as json_dumps
    +    from ujson import loads as json_lds
     except ImportError:
    -    from json import dumps as json_dumps, loads as json_lds
    +    from json import dumps as json_dumps
    +    from json import loads as json_lds
     
     py = sys.version_info
     py3k = py.major > 2
     
     # Lots of stdlib and builtin differences.
     if py3k:
    -    import http.client as httplib
         import _thread as thread
    -    from urllib.parse import urljoin, SplitResult as UrlSplitResult
    -    from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
    +    import http.client as httplib
    +    from urllib.parse import SplitResult as UrlSplitResult
    +    from urllib.parse import quote as urlquote
    +    from urllib.parse import unquote as urlunquote
    +    from urllib.parse import urlencode, urljoin
    +
         urlunquote = functools.partial(urlunquote, encoding='latin1')
    -    from http.cookies import SimpleCookie, Morsel, CookieError
    -    from collections.abc import MutableMapping as DictMixin
    -    from types import ModuleType as new_module
    -    import pickle
    -    from io import BytesIO
         import configparser
    +    import pickle
    +    from collections.abc import MutableMapping as DictMixin
         from datetime import timezone
    +    from http.cookies import CookieError, Morsel, SimpleCookie
    +    from io import BytesIO
    +    from types import ModuleType as new_module
    +
         UTC = timezone.utc
         # getfullargspec was deprecated in 3.5 and un-deprecated in 3.6
         # getargspec was deprecated in 3.0 and removed in 3.11
         from inspect import getfullargspec
    +
         def getargspec(func):
             spec = getfullargspec(func)
             kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs)
    @@ -118,25 +139,35 @@ def getargspec(func):
         def _raise(*a):
             raise a[0](a[1]).with_traceback(a[2])
     else:  # 2.x
    -    warnings.warn("Python 2 support will be dropped in Bottle 0.14", DeprecationWarning)
    -    import httplib
    -    import thread
    -    from urlparse import urljoin, SplitResult as UrlSplitResult
    -    from urllib import urlencode, quote as urlquote, unquote as urlunquote
    -    from Cookie import SimpleCookie, Morsel, CookieError
    +    warnings.warn('Python 2 support will be dropped in Bottle 0.14', DeprecationWarning)
    +    from collections import MutableMapping as DictMixin
    +    from datetime import tzinfo
    +    from imp import new_module
    +    from inspect import getargspec
         from itertools import imap
    +    from urllib import quote as urlquote
    +    from urllib import unquote as urlunquote
    +    from urllib import urlencode
    +
    +    import ConfigParser as configparser
         import cPickle as pickle
    -    from imp import new_module
    +    import httplib
    +    import thread
    +    from Cookie import CookieError, Morsel, SimpleCookie
         from StringIO import StringIO as BytesIO
    -    import ConfigParser as configparser
    -    from collections import MutableMapping as DictMixin
    -    from inspect import getargspec
    -    from datetime import tzinfo
    +    from urlparse import SplitResult as UrlSplitResult
    +    from urlparse import urljoin
     
         class _UTC(tzinfo):
    -        def utcoffset(self, dt): return timedelta(0)
    -        def tzname(self, dt): return "UTC"
    -        def dst(self, dt): return timedelta(0)
    +        def utcoffset(self, dt):
    +            return timedelta(0)
    +
    +        def tzname(self, dt):
    +            return 'UTC'
    +
    +        def dst(self, dt):
    +            return timedelta(0)
    +
         UTC = _UTC()
     
         unicode = unicode
    @@ -144,6 +175,7 @@ def dst(self, dt): return timedelta(0)
     
         exec(compile('def _raise(*a): raise a[0], a[1], a[2]', '', 'exec'))
     
    +
     # Some helpers for string/byte handling
     def tob(s, enc='utf8'):
         if isinstance(s, unicode):
    @@ -154,7 +186,7 @@ def tob(s, enc='utf8'):
     def touni(s, enc='utf8', err='strict'):
         if isinstance(s, bytes):
             return s.decode(enc, err)
    -    return unicode("" if s is None else s)
    +    return unicode('' if s is None else s)
     
     
     tonat = touni if py3k else tob
    @@ -164,7 +196,7 @@ def _stderr(*args):
         try:
             print(*args, file=sys.stderr)
         except (IOError, AttributeError):
    -        pass # Some environments do not allow printing (mod_wsgi)
    +        pass  # Some environments do not allow printing (mod_wsgi)
     
     
     # A bug in functools causes it to break if the wrapper is an instance method
    @@ -174,14 +206,18 @@ def update_wrapper(wrapper, wrapped, *a, **ka):
         except AttributeError:
             pass
     
    +
     # These helpers are used at module level and need to be defined first.
     # And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
     
     
     def depr(major, minor, cause, fix, stacklevel=3):
    -    text = "Warning: Use of deprecated feature or API. (Deprecated in Bottle-%d.%d)\n"\
    -           "Cause: %s\n"\
    -           "Fix: %s\n" % (major, minor, cause, fix)
    +    text = 'Warning: Use of deprecated feature or API. (Deprecated in Bottle-%d.%d)\nCause: %s\nFix: %s\n' % (
    +        major,
    +        minor,
    +        cause,
    +        fix,
    +    )
         if DEBUG == 'strict':
             raise DeprecationWarning(text)
         warnings.warn(text, DeprecationWarning, stacklevel=stacklevel)
    @@ -198,7 +234,7 @@ def makelist(data):  # This is just too handy
     
     
     class DictProperty(object):
    -    """ Property that maps to a key in a local dict-like attribute. """
    +    """Property that maps to a key in a local dict-like attribute."""
     
         def __init__(self, attr, key=None, read_only=False):
             self.attr, self.key, self.read_only = attr, key, read_only
    @@ -209,37 +245,42 @@ def __call__(self, func):
             return self
     
         def __get__(self, obj, cls):
    -        if obj is None: return self
    +        if obj is None:
    +            return self
             key, storage = self.key, getattr(obj, self.attr)
    -        if key not in storage: storage[key] = self.getter(obj)
    +        if key not in storage:
    +            storage[key] = self.getter(obj)
             return storage[key]
     
         def __set__(self, obj, value):
    -        if self.read_only: raise AttributeError("Read-Only property.")
    +        if self.read_only:
    +            raise AttributeError('Read-Only property.')
             getattr(obj, self.attr)[self.key] = value
     
         def __delete__(self, obj):
    -        if self.read_only: raise AttributeError("Read-Only property.")
    +        if self.read_only:
    +            raise AttributeError('Read-Only property.')
             del getattr(obj, self.attr)[self.key]
     
     
     class cached_property(object):
    -    """ A property that is only computed once per instance and then replaces
    -        itself with an ordinary attribute. Deleting the attribute resets the
    -        property. """
    +    """A property that is only computed once per instance and then replaces
    +    itself with an ordinary attribute. Deleting the attribute resets the
    +    property."""
     
         def __init__(self, func):
             update_wrapper(self, func)
             self.func = func
     
         def __get__(self, obj, cls):
    -        if obj is None: return self
    +        if obj is None:
    +            return self
             value = obj.__dict__[self.func.__name__] = self.func(obj)
             return value
     
     
     class lazy_attribute(object):
    -    """ A property that caches itself to the class object. """
    +    """A property that caches itself to the class object."""
     
         def __init__(self, func):
             functools.update_wrapper(self, func, updated=[])
    @@ -257,55 +298,57 @@ def __get__(self, obj, cls):
     
     
     class BottleException(Exception):
    -    """ A base class for exceptions used by bottle. """
    +    """A base class for exceptions used by bottle."""
    +
         pass
     
    +
     ###############################################################################
     # Routing ######################################################################
     ###############################################################################
     
     
     class RouteError(BottleException):
    -    """ This is a base class for all routing related exceptions """
    +    """This is a base class for all routing related exceptions"""
     
     
     class RouteReset(BottleException):
    -    """ If raised by a plugin or request handler, the route is reset and all
    -        plugins are re-applied. """
    +    """If raised by a plugin or request handler, the route is reset and all
    +    plugins are re-applied."""
     
     
     class RouterUnknownModeError(RouteError):
    -
         pass
     
     
     class RouteSyntaxError(RouteError):
    -    """ The route parser found something not supported by this router. """
    +    """The route parser found something not supported by this router."""
     
     
     class RouteBuildError(RouteError):
    -    """ The route could not be built. """
    +    """The route could not be built."""
     
     
     def _re_flatten(p):
    -    """ Turn all capturing groups in a regular expression pattern into
    -        non-capturing groups. """
    +    """Turn all capturing groups in a regular expression pattern into
    +    non-capturing groups."""
         if '(' not in p:
             return p
    -    return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))', lambda m: m.group(0) if
    -                  len(m.group(1)) % 2 else m.group(1) + '(?:', p)
    +    return re.sub(
    +        r'(\\*)(\(\?P<[^>]+>|\((?!\?))', lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p
    +    )
     
     
     class Router(object):
    -    """ A Router is an ordered collection of route->target pairs. It is used to
    -        efficiently match WSGI requests against a number of routes and return
    -        the first target that satisfies the request. The target may be anything,
    -        usually a string, ID or callable object. A route consists of a path-rule
    -        and a HTTP method.
    -
    -        The path-rule is either a static path (e.g. `/contact`) or a dynamic
    -        path that contains wildcards (e.g. `/wiki/`). The wildcard syntax
    -        and details on the matching order are described in docs:`routing`.
    +    """A Router is an ordered collection of route->target pairs. It is used to
    +    efficiently match WSGI requests against a number of routes and return
    +    the first target that satisfies the request. The target may be anything,
    +    usually a string, ID or callable object. A route consists of a path-rule
    +    and a HTTP method.
    +
    +    The path-rule is either a static path (e.g. `/contact`) or a dynamic
    +    path that contains wildcards (e.g. `/wiki/`). The wildcard syntax
    +    and details on the matching order are described in docs:`routing`.
         """
     
         default_pattern = '[^/]+'
    @@ -325,35 +368,34 @@ def __init__(self, strict=False):
             #: If true, static routes are no longer checked first.
             self.strict_order = strict
             self.filters = {
    -            're': lambda conf: (_re_flatten(conf or self.default_pattern),
    -                                None, None),
    +            're': lambda conf: (_re_flatten(conf or self.default_pattern), None, None),
                 'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
                 'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
    -            'path': lambda conf: (r'.+?', None, None)
    +            'path': lambda conf: (r'.+?', None, None),
             }
     
         def add_filter(self, name, func):
    -        """ Add a filter. The provided function is called with the configuration
    +        """Add a filter. The provided function is called with the configuration
             string as parameter and must return a (regexp, to_python, to_url) tuple.
    -        The first element is a string, the last two are callables or None. """
    +        The first element is a string, the last two are callables or None."""
             self.filters[name] = func
     
    -    rule_syntax = re.compile('(\\\\*)'
    +    rule_syntax = re.compile(
    +        '(\\\\*)'
             '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'
    -          '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'
    -            '(?::((?:\\\\.|[^\\\\>])+)?)?)?>))')
    +        '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'
    +        '(?::((?:\\\\.|[^\\\\>])+)?)?)?>))'
    +    )
     
         def _itertokens(self, rule):
             offset, prefix = 0, ''
             for match in self.rule_syntax.finditer(rule):
    -            prefix += rule[offset:match.start()]
    +            prefix += rule[offset : match.start()]
                 g = match.groups()
                 if g[2] is not None:
    -                depr(0, 13, "Use of old route syntax.",
    -                            "Use  instead of :name in routes.",
    -                            stacklevel=4)
    +                depr(0, 13, 'Use of old route syntax.', 'Use  instead of :name in routes.', stacklevel=4)
                 if len(g[0]) % 2:  # Escaped wildcard
    -                prefix += match.group(0)[len(g[0]):]
    +                prefix += match.group(0)[len(g[0]) :]
                     offset = match.end()
                     continue
                 if prefix:
    @@ -365,7 +407,7 @@ def _itertokens(self, rule):
                 yield prefix + rule[offset:], None, None
     
         def add(self, rule, method, target, name=None):
    -        """ Add a new rule or replace the target for an existing rule. """
    +        """Add a new rule or replace the target for an existing rule."""
             anons = 0  # Number of anonymous wildcards found
             keys = []  # Names of keys
             pattern = ''  # Regular expression pattern with named groups
    @@ -376,7 +418,8 @@ def add(self, rule, method, target, name=None):
             for key, mode, conf in self._itertokens(rule):
                 if mode:
                     is_static = False
    -                if mode == 'default': mode = self.default_filter
    +                if mode == 'default':
    +                    mode = self.default_filter
                     mask, in_filter, out_filter = self.filters[mode](conf)
                     if not key:
                         pattern += '(?:%s)' % mask
    @@ -385,14 +428,16 @@ def add(self, rule, method, target, name=None):
                     else:
                         pattern += '(?P<%s>%s)' % (key, mask)
                         keys.append(key)
    -                if in_filter: filters.append((key, in_filter))
    +                if in_filter:
    +                    filters.append((key, in_filter))
                     builder.append((key, out_filter or str))
                 elif key:
                     pattern += re.escape(key)
                     builder.append((None, key))
     
             self.builder[rule] = builder
    -        if name: self.builder[name] = builder
    +        if name:
    +            self.builder[name] = builder
     
             if is_static and not self.strict_order:
                 self.static.setdefault(method, {})
    @@ -403,7 +448,7 @@ def add(self, rule, method, target, name=None):
                 re_pattern = re.compile('^(%s)$' % pattern)
                 re_match = re_pattern.match
             except re.error as e:
    -            raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, e))
    +            raise RouteSyntaxError('Could not add Route: %s (%s)' % (rule, e))
     
             if filters:
     
    @@ -429,8 +474,7 @@ def getargs(path):
                 if DEBUG:
                     msg = 'Route <%s %s> overwrites a previously defined route'
                     warnings.warn(msg % (method, rule), RuntimeWarning, stacklevel=3)
    -            self.dyna_routes[method][
    -                self._groups[flatpat, method]] = whole_rule
    +            self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
             else:
                 self.dyna_routes.setdefault(method, []).append(whole_rule)
                 self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1
    @@ -442,7 +486,7 @@ def _compile(self, method):
             comborules = self.dyna_regexes[method] = []
             maxgroups = self._MAX_GROUPS_PER_PATTERN
             for x in range(0, len(all_rules), maxgroups):
    -            some = all_rules[x:x + maxgroups]
    +            some = all_rules[x : x + maxgroups]
                 combined = (flatpat for (_, flatpat, _, _) in some)
                 combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
                 combined = re.compile(combined).match
    @@ -450,10 +494,10 @@ def _compile(self, method):
                 comborules.append((combined, rules))
     
         def build(self, _name, *anons, **query):
    -        """ Build an URL by filling the wildcards in a rule. """
    +        """Build an URL by filling the wildcards in a rule."""
             builder = self.builder.get(_name)
             if not builder:
    -            raise RouteBuildError("No route with that name.", _name)
    +            raise RouteBuildError('No route with that name.', _name)
             try:
                 for i, value in enumerate(anons):
                     query['anon%d' % i] = value
    @@ -463,7 +507,7 @@ def build(self, _name, *anons, **query):
                 raise RouteBuildError('Missing URL argument: %r' % E.args[0])
     
         def match(self, environ):
    -        """ Return a (target, url_args) tuple or raise HTTPError(400/404/405). """
    +        """Return a (target, url_args) tuple or raise HTTPError(400/404/405)."""
             verb = environ['REQUEST_METHOD'].upper()
             path = environ['PATH_INFO'] or '/'
     
    @@ -492,23 +536,20 @@ def match(self, environ):
                     if match:
                         allowed.add(method)
             if allowed:
    -            allow_header = ",".join(sorted(allowed))
    -            raise HTTPError(405, "Method not allowed.", Allow=allow_header)
    +            allow_header = ','.join(sorted(allowed))
    +            raise HTTPError(405, 'Method not allowed.', Allow=allow_header)
     
             # No matching route and no alternative method found. We give up
    -        raise HTTPError(404, "Not found: " + repr(path))
    +        raise HTTPError(404, 'Not found: ' + repr(path))
     
     
     class Route(object):
    -    """ This class wraps a route callback along with route specific metadata and
    -        configuration and applies Plugins on demand. It is also responsible for
    -        turning an URL path rule into a regular expression usable by the Router.
    +    """This class wraps a route callback along with route specific metadata and
    +    configuration and applies Plugins on demand. It is also responsible for
    +    turning an URL path rule into a regular expression usable by the Router.
         """
     
    -    def __init__(self, app, rule, method, callback,
    -                 name=None,
    -                 plugins=None,
    -                 skiplist=None, **config):
    +    def __init__(self, app, rule, method, callback, name=None, plugins=None, skiplist=None, **config):
             #: The application this route is installed to.
             self.app = app
             #: The path-rule string (e.g. ``/wiki/``).
    @@ -531,28 +572,32 @@ def __init__(self, app, rule, method, callback,
     
         @cached_property
         def call(self):
    -        """ The route callback with all plugins applied. This property is
    -            created on demand and then cached to speed up subsequent requests."""
    +        """The route callback with all plugins applied. This property is
    +        created on demand and then cached to speed up subsequent requests."""
             return self._make_callback()
     
         def reset(self):
    -        """ Forget any cached values. The next time :attr:`call` is accessed,
    -            all plugins are re-applied. """
    +        """Forget any cached values. The next time :attr:`call` is accessed,
    +        all plugins are re-applied."""
             self.__dict__.pop('call', None)
     
         def prepare(self):
    -        """ Do all on-demand work immediately (useful for debugging)."""
    +        """Do all on-demand work immediately (useful for debugging)."""
             self.call
     
         def all_plugins(self):
    -        """ Yield all Plugins affecting this route. """
    +        """Yield all Plugins affecting this route."""
             unique = set()
             for p in reversed(self.app.plugins + self.plugins):
    -            if True in self.skiplist: break
    +            if True in self.skiplist:
    +                break
                 name = getattr(p, 'name', False)
    -            if name and (name in self.skiplist or name in unique): continue
    -            if p in self.skiplist or type(p) in self.skiplist: continue
    -            if name: unique.add(name)
    +            if name and (name in self.skiplist or name in unique):
    +                continue
    +            if p in self.skiplist or type(p) in self.skiplist:
    +                continue
    +            if name:
    +                unique.add(name)
                 yield p
     
         def _make_callback(self):
    @@ -570,8 +615,8 @@ def _make_callback(self):
             return callback
     
         def get_undecorated_callback(self):
    -        """ Return the callback. If the callback is a decorated function, try to
    -            recover the original function. """
    +        """Return the callback. If the callback is a decorated function, try to
    +        recover the original function."""
             func = self.callback
             func = getattr(func, '__func__' if py3k else 'im_func', func)
             closure_attr = '__closure__' if py3k else 'func_closure'
    @@ -582,41 +627,45 @@ def get_undecorated_callback(self):
                 # in case of decorators with multiple arguments
                 if not isinstance(func, FunctionType):
                     # pick first FunctionType instance from multiple arguments
    -                func = filter(lambda x: isinstance(x, FunctionType),
    -                              map(lambda x: x.cell_contents, attributes))
    +                func = filter(lambda x: isinstance(x, FunctionType), map(lambda x: x.cell_contents, attributes))
                     func = list(func)[0]  # py3 support
             return func
     
         def get_callback_args(self):
    -        """ Return a list of argument names the callback (most likely) accepts
    -            as keyword arguments. If the callback is a decorated function, try
    -            to recover the original function before inspection. """
    +        """Return a list of argument names the callback (most likely) accepts
    +        as keyword arguments. If the callback is a decorated function, try
    +        to recover the original function before inspection."""
             return getargspec(self.get_undecorated_callback())[0]
     
         def get_config(self, key, default=None):
    -        """ Lookup a config field and return its value, first checking the
    -            route.config, then route.app.config."""
    -        depr(0, 13, "Route.get_config() is deprecated.",
    -                    "The Route.config property already includes values from the"
    -                    " application config for missing keys. Access it directly.")
    +        """Lookup a config field and return its value, first checking the
    +        route.config, then route.app.config."""
    +        depr(
    +            0,
    +            13,
    +            'Route.get_config() is deprecated.',
    +            'The Route.config property already includes values from the'
    +            ' application config for missing keys. Access it directly.',
    +        )
             return self.config.get(key, default)
     
         def __repr__(self):
             cb = self.get_undecorated_callback()
             return '<%s %s -> %s:%s>' % (self.method, self.rule, cb.__module__, cb.__name__)
     
    +
     ###############################################################################
     # Application Object ###########################################################
     ###############################################################################
     
     
     class Bottle(object):
    -    """ Each Bottle object represents a single, distinct web application and
    -        consists of routes, callbacks, plugins, resources and configuration.
    -        Instances are callable WSGI applications.
    +    """Each Bottle object represents a single, distinct web application and
    +    consists of routes, callbacks, plugins, resources and configuration.
    +    Instances are callable WSGI applications.
     
    -        :param catchall: If true (default), handle all exceptions. Turn off to
    -                         let debugging middleware handle exceptions.
    +    :param catchall: If true (default), handle all exceptions. Turn off to
    +                     let debugging middleware handle exceptions.
         """
     
         @lazy_attribute
    @@ -628,22 +677,25 @@ def _global_config(cls):
         def __init__(self, **kwargs):
             #: A :class:`ConfigDict` for app specific configuration.
             self.config = self._global_config._make_overlay()
    -        self.config._add_change_listener(
    -            functools.partial(self.trigger_hook, 'config'))
    +        self.config._add_change_listener(functools.partial(self.trigger_hook, 'config'))
     
    -        self.config.update({
    -            "catchall": True
    -        })
    +        self.config.update({'catchall': True})
     
             if kwargs.get('catchall') is False:
    -            depr(0, 13, "Bottle(catchall) keyword argument.",
    -                        "The 'catchall' setting is now part of the app "
    -                        "configuration. Fix: `app.config['catchall'] = False`")
    +            depr(
    +                0,
    +                13,
    +                'Bottle(catchall) keyword argument.',
    +                "The 'catchall' setting is now part of the app configuration. Fix: `app.config['catchall'] = False`",
    +            )
                 self.config['catchall'] = False
             if kwargs.get('autojson') is False:
    -            depr(0, 13, "Bottle(autojson) keyword argument.",
    -                 "The 'autojson' setting is now part of the app "
    -                 "configuration. Fix: `app.config['json.enable'] = False`")
    +            depr(
    +                0,
    +                13,
    +                'Bottle(autojson) keyword argument.',
    +                "The 'autojson' setting is now part of the app configuration. Fix: `app.config['json.enable'] = False`",
    +            )
                 self.config['json.disable'] = True
     
             self._mounts = []
    @@ -671,15 +723,15 @@ def _hooks(self):
             return dict((name, []) for name in self.__hook_names)
     
         def add_hook(self, name, func):
    -        """ Attach a callback to a hook. Three hooks are currently implemented:
    -
    -            before_request
    -                Executed once before each request. The request context is
    -                available, but no routing has happened yet.
    -            after_request
    -                Executed once after each request regardless of its outcome.
    -            app_reset
    -                Called whenever :meth:`Bottle.reset` is called.
    +        """Attach a callback to a hook. Three hooks are currently implemented:
    +
    +        before_request
    +            Executed once before each request. The request context is
    +            available, but no routing has happened yet.
    +        after_request
    +            Executed once after each request regardless of its outcome.
    +        app_reset
    +            Called whenever :meth:`Bottle.reset` is called.
             """
             if name in self.__hook_reversed:
                 self._hooks[name].insert(0, func)
    @@ -687,18 +739,18 @@ def add_hook(self, name, func):
                 self._hooks[name].append(func)
     
         def remove_hook(self, name, func):
    -        """ Remove a callback from a hook. """
    +        """Remove a callback from a hook."""
             if name in self._hooks and func in self._hooks[name]:
                 self._hooks[name].remove(func)
                 return True
     
         def trigger_hook(self, __name, *args, **kwargs):
    -        """ Trigger a hook and return a list of results. """
    +        """Trigger a hook and return a list of results."""
             return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
     
         def hook(self, name):
    -        """ Return a decorator that attaches a callback to a hook. See
    -            :meth:`add_hook` for details."""
    +        """Return a decorator that attaches a callback to a hook. See
    +        :meth:`add_hook` for details."""
     
             def decorator(func):
                 self.add_hook(name, func)
    @@ -725,8 +777,7 @@ def start_response(status, headerlist, exc_info=None):
                             # follow PEP-3333 (which requires latin1) or used a
                             # pre-encoding other than utf8 :/
                             status = status.encode('latin1').decode('utf8')
    -                        headerlist = [(k, v.encode('latin1').decode('utf8'))
    -                                      for (k, v) in headerlist]
    +                        headerlist = [(k, v.encode('latin1').decode('utf8')) for (k, v) in headerlist]
                         rs.status = status
                         for name, value in headerlist:
                             rs.add_header(name, value)
    @@ -749,18 +800,30 @@ def start_response(status, headerlist, exc_info=None):
     
         def _mount_app(self, prefix, app, **options):
             if app in self._mounts or '_mount.app' in app.config:
    -            depr(0, 13, "Application mounted multiple times. Falling back to WSGI mount.",
    -                 "Clone application before mounting to a different location.")
    +            depr(
    +                0,
    +                13,
    +                'Application mounted multiple times. Falling back to WSGI mount.',
    +                'Clone application before mounting to a different location.',
    +            )
                 return self._mount_wsgi(prefix, app, **options)
     
             if options:
    -            depr(0, 13, "Unsupported mount options. Falling back to WSGI mount.",
    -                 "Do not specify any route options when mounting bottle application.")
    +            depr(
    +                0,
    +                13,
    +                'Unsupported mount options. Falling back to WSGI mount.',
    +                'Do not specify any route options when mounting bottle application.',
    +            )
                 return self._mount_wsgi(prefix, app, **options)
     
    -        if not prefix.endswith("/"):
    -            depr(0, 13, "Prefix must end in '/'. Falling back to WSGI mount.",
    -                 "Consider adding an explicit redirect from '/prefix' to '/prefix/' in the parent application.")
    +        if not prefix.endswith('/'):
    +            depr(
    +                0,
    +                13,
    +                "Prefix must end in '/'. Falling back to WSGI mount.",
    +                "Consider adding an explicit redirect from '/prefix' to '/prefix/' in the parent application.",
    +            )
                 return self._mount_wsgi(prefix, app, **options)
     
             self._mounts.append(app)
    @@ -771,25 +834,25 @@ def _mount_app(self, prefix, app, **options):
                 self.add_route(route)
     
         def mount(self, prefix, app, **options):
    -        """ Mount an application (:class:`Bottle` or plain WSGI) to a specific
    -            URL prefix. Example::
    +        """Mount an application (:class:`Bottle` or plain WSGI) to a specific
    +        URL prefix. Example::
     
    -                parent_app.mount('/prefix/', child_app)
    +            parent_app.mount('/prefix/', child_app)
     
    -            :param prefix: path prefix or `mount-point`.
    -            :param app: an instance of :class:`Bottle` or a WSGI application.
    +        :param prefix: path prefix or `mount-point`.
    +        :param app: an instance of :class:`Bottle` or a WSGI application.
     
    -            Plugins from the parent application are not applied to the routes
    -            of the mounted child application. If you need plugins in the child
    -            application, install them separately.
    +        Plugins from the parent application are not applied to the routes
    +        of the mounted child application. If you need plugins in the child
    +        application, install them separately.
     
    -            While it is possible to use path wildcards within the prefix path
    -            (:class:`Bottle` childs only), it is highly discouraged.
    +        While it is possible to use path wildcards within the prefix path
    +        (:class:`Bottle` childs only), it is highly discouraged.
     
    -            The prefix path must end with a slash. If you want to access the
    -            root of the child application via `/prefix` in addition to
    -            `/prefix/`, consider adding a route with a 307 redirect to the
    -            parent application.
    +        The prefix path must end with a slash. If you want to access the
    +        root of the child application via `/prefix` in addition to
    +        `/prefix/`, consider adding a route with a 307 redirect to the
    +        parent application.
             """
     
             if not prefix.startswith('/'):
    @@ -801,49 +864,54 @@ def mount(self, prefix, app, **options):
                 return self._mount_wsgi(prefix, app, **options)
     
         def merge(self, routes):
    -        """ Merge the routes of another :class:`Bottle` application or a list of
    -            :class:`Route` objects into this application. The routes keep their
    -            'owner', meaning that the :data:`Route.app` attribute is not
    -            changed. """
    +        """Merge the routes of another :class:`Bottle` application or a list of
    +        :class:`Route` objects into this application. The routes keep their
    +        'owner', meaning that the :data:`Route.app` attribute is not
    +        changed."""
             if isinstance(routes, Bottle):
                 routes = routes.routes
             for route in routes:
                 self.add_route(route)
     
         def install(self, plugin):
    -        """ Add a plugin to the list of plugins and prepare it for being
    -            applied to all routes of this application. A plugin may be a simple
    -            decorator or an object that implements the :class:`Plugin` API.
    +        """Add a plugin to the list of plugins and prepare it for being
    +        applied to all routes of this application. A plugin may be a simple
    +        decorator or an object that implements the :class:`Plugin` API.
             """
    -        if hasattr(plugin, 'setup'): plugin.setup(self)
    +        if hasattr(plugin, 'setup'):
    +            plugin.setup(self)
             if not callable(plugin) and not hasattr(plugin, 'apply'):
    -            raise TypeError("Plugins must be callable or implement .apply()")
    +            raise TypeError('Plugins must be callable or implement .apply()')
             self.plugins.append(plugin)
             self.reset()
             return plugin
     
         def uninstall(self, plugin):
    -        """ Uninstall plugins. Pass an instance to remove a specific plugin, a type
    -            object to remove all plugins that match that type, a string to remove
    -            all plugins with a matching ``name`` attribute or ``True`` to remove all
    -            plugins. Return the list of removed plugins. """
    +        """Uninstall plugins. Pass an instance to remove a specific plugin, a type
    +        object to remove all plugins that match that type, a string to remove
    +        all plugins with a matching ``name`` attribute or ``True`` to remove all
    +        plugins. Return the list of removed plugins."""
             removed, remove = [], plugin
             for i, plugin in list(enumerate(self.plugins))[::-1]:
    -            if remove is True or remove is plugin or remove is type(plugin) \
    -            or getattr(plugin, 'name', True) == remove:
    +            if remove is True or remove is plugin or remove is type(plugin) or getattr(plugin, 'name', True) == remove:
                     removed.append(plugin)
                     del self.plugins[i]
    -                if hasattr(plugin, 'close'): plugin.close()
    -        if removed: self.reset()
    +                if hasattr(plugin, 'close'):
    +                    plugin.close()
    +        if removed:
    +            self.reset()
             return removed
     
         def reset(self, route=None):
    -        """ Reset all routes (force plugins to be re-applied) and clear all
    -            caches. If an ID or route object is given, only that specific route
    -            is affected. """
    -        if route is None: routes = self.routes
    -        elif isinstance(route, Route): routes = [route]
    -        else: routes = [self.routes[route]]
    +        """Reset all routes (force plugins to be re-applied) and clear all
    +        caches. If an ID or route object is given, only that specific route
    +        is affected."""
    +        if route is None:
    +            routes = self.routes
    +        elif isinstance(route, Route):
    +            routes = [route]
    +        else:
    +            routes = [self.routes[route]]
             for route in routes:
                 route.reset()
             if DEBUG:
    @@ -852,120 +920,116 @@ def reset(self, route=None):
             self.trigger_hook('app_reset')
     
         def close(self):
    -        """ Close the application and all installed plugins. """
    +        """Close the application and all installed plugins."""
             for plugin in self.plugins:
    -            if hasattr(plugin, 'close'): plugin.close()
    +            if hasattr(plugin, 'close'):
    +                plugin.close()
     
         def run(self, **kwargs):
    -        """ Calls :func:`run` with the same parameters. """
    +        """Calls :func:`run` with the same parameters."""
             run(self, **kwargs)
     
         def match(self, environ):
    -        """ Search for a matching route and return a (:class:`Route`, urlargs)
    -            tuple. The second value is a dictionary with parameters extracted
    -            from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
    +        """Search for a matching route and return a (:class:`Route`, urlargs)
    +        tuple. The second value is a dictionary with parameters extracted
    +        from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
             return self.router.match(environ)
     
         def get_url(self, routename, **kargs):
    -        """ Return a string that matches a named route """
    +        """Return a string that matches a named route"""
             scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
             location = self.router.build(routename, **kargs).lstrip('/')
             return urljoin(urljoin('/', scriptname), location)
     
         def add_route(self, route):
    -        """ Add a route object, but do not change the :data:`Route.app`
    -            attribute."""
    +        """Add a route object, but do not change the :data:`Route.app`
    +        attribute."""
             self.routes.append(route)
             self.router.add(route.rule, route.method, route, name=route.name)
    -        if DEBUG: route.prepare()
    -
    -    def route(self,
    -              path=None,
    -              method='GET',
    -              callback=None,
    -              name=None,
    -              apply=None,
    -              skip=None, **config):
    -        """ A decorator to bind a function to a request URL. Example::
    -
    -                @app.route('/hello/')
    -                def hello(name):
    -                    return 'Hello %s' % name
    -
    -            The ```` part is a wildcard. See :class:`Router` for syntax
    -            details.
    -
    -            :param path: Request path or a list of paths to listen to. If no
    -              path is specified, it is automatically generated from the
    -              signature of the function.
    -            :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
    -              methods to listen to. (default: `GET`)
    -            :param callback: An optional shortcut to avoid the decorator
    -              syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
    -            :param name: The name for this route. (default: None)
    -            :param apply: A decorator or plugin or a list of plugins. These are
    -              applied to the route callback in addition to installed plugins.
    -            :param skip: A list of plugins, plugin classes or names. Matching
    -              plugins are not installed to this route. ``True`` skips all.
    -
    -            Any additional keyword arguments are stored as route-specific
    -            configuration and passed to plugins (see :meth:`Plugin.apply`).
    +        if DEBUG:
    +            route.prepare()
    +
    +    def route(self, path=None, method='GET', callback=None, name=None, apply=None, skip=None, **config):
    +        """A decorator to bind a function to a request URL. Example::
    +
    +            @app.route('/hello/')
    +            def hello(name):
    +                return 'Hello %s' % name
    +
    +        The ```` part is a wildcard. See :class:`Router` for syntax
    +        details.
    +
    +        :param path: Request path or a list of paths to listen to. If no
    +          path is specified, it is automatically generated from the
    +          signature of the function.
    +        :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
    +          methods to listen to. (default: `GET`)
    +        :param callback: An optional shortcut to avoid the decorator
    +          syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
    +        :param name: The name for this route. (default: None)
    +        :param apply: A decorator or plugin or a list of plugins. These are
    +          applied to the route callback in addition to installed plugins.
    +        :param skip: A list of plugins, plugin classes or names. Matching
    +          plugins are not installed to this route. ``True`` skips all.
    +
    +        Any additional keyword arguments are stored as route-specific
    +        configuration and passed to plugins (see :meth:`Plugin.apply`).
             """
    -        if callable(path): path, callback = None, path
    +        if callable(path):
    +            path, callback = None, path
             plugins = makelist(apply)
             skiplist = makelist(skip)
     
             def decorator(callback):
    -            if isinstance(callback, basestring): callback = load(callback)
    +            if isinstance(callback, basestring):
    +                callback = load(callback)
                 for rule in makelist(path) or yieldroutes(callback):
                     for verb in makelist(method):
                         verb = verb.upper()
    -                    route = Route(self, rule, verb, callback,
    -                                  name=name,
    -                                  plugins=plugins,
    -                                  skiplist=skiplist, **config)
    +                    route = Route(self, rule, verb, callback, name=name, plugins=plugins, skiplist=skiplist, **config)
                         self.add_route(route)
                 return callback
     
             return decorator(callback) if callback else decorator
     
         def get(self, path=None, method='GET', **options):
    -        """ Equals :meth:`route`. """
    +        """Equals :meth:`route`."""
             return self.route(path, method, **options)
     
         def post(self, path=None, method='POST', **options):
    -        """ Equals :meth:`route` with a ``POST`` method parameter. """
    +        """Equals :meth:`route` with a ``POST`` method parameter."""
             return self.route(path, method, **options)
     
         def put(self, path=None, method='PUT', **options):
    -        """ Equals :meth:`route` with a ``PUT`` method parameter. """
    +        """Equals :meth:`route` with a ``PUT`` method parameter."""
             return self.route(path, method, **options)
     
         def delete(self, path=None, method='DELETE', **options):
    -        """ Equals :meth:`route` with a ``DELETE`` method parameter. """
    +        """Equals :meth:`route` with a ``DELETE`` method parameter."""
             return self.route(path, method, **options)
     
         def patch(self, path=None, method='PATCH', **options):
    -        """ Equals :meth:`route` with a ``PATCH`` method parameter. """
    +        """Equals :meth:`route` with a ``PATCH`` method parameter."""
             return self.route(path, method, **options)
     
         def error(self, code=500, callback=None):
    -        """ Register an output handler for a HTTP error code. Can
    -            be used as a decorator or called directly ::
    +        """Register an output handler for a HTTP error code. Can
    +        be used as a decorator or called directly ::
     
    -                def error_handler_500(error):
    -                    return 'error_handler_500'
    +            def error_handler_500(error):
    +                return 'error_handler_500'
     
    -                app.error(code=500, callback=error_handler_500)
    +            app.error(code=500, callback=error_handler_500)
     
    -                @app.error(404)
    -                def error_handler_404(error):
    -                    return 'error_handler_404'
    +            @app.error(404)
    +            def error_handler_404(error):
    +                return 'error_handler_404'
     
             """
     
             def decorator(callback):
    -            if isinstance(callback, basestring): callback = load(callback)
    +            if isinstance(callback, basestring):
    +                callback = load(callback)
                 self.error_handler[int(code)] = callback
                 return callback
     
    @@ -984,7 +1048,7 @@ def _handle(self, environ):
             response.bind()
     
             try:
    -            while True: # Remove in 0.14 together with RouteReset
    +            while True:  # Remove in 0.14 together with RouteReset
                     out = None
                     try:
                         self.trigger_hook('before_request')
    @@ -998,9 +1062,12 @@ def _handle(self, environ):
                         out = E
                         break
                     except RouteReset:
    -                    depr(0, 13, "RouteReset exception deprecated",
    -                                "Call route.call() after route.reset() and "
    -                                "return the result.")
    +                    depr(
    +                        0,
    +                        13,
    +                        'RouteReset exception deprecated',
    +                        'Call route.call() after route.reset() and return the result.',
    +                    )
                         route.reset()
                         continue
                     finally:
    @@ -1014,18 +1081,19 @@ def _handle(self, environ):
             except (KeyboardInterrupt, SystemExit, MemoryError):
                 raise
             except Exception as E:
    -            if not self.catchall: raise
    +            if not self.catchall:
    +                raise
                 stacktrace = format_exc()
                 environ['wsgi.errors'].write(stacktrace)
                 environ['wsgi.errors'].flush()
                 environ['bottle.exc_info'] = sys.exc_info()
    -            out = HTTPError(500, "Internal Server Error", E, stacktrace)
    +            out = HTTPError(500, 'Internal Server Error', E, stacktrace)
                 out.apply(response)
     
             return out
     
         def _cast(self, out, peek=None):
    -        """ Try to convert the parameter into something WSGI compatible and set
    +        """Try to convert the parameter into something WSGI compatible and set
             correct HTTP headers when possible.
             Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
             iterable of strings and iterable of unicodes
    @@ -1037,8 +1105,7 @@ def _cast(self, out, peek=None):
                     response['Content-Length'] = 0
                 return []
             # Join lists of byte or unicode strings. Mixed lists are NOT supported
    -        if isinstance(out, (tuple, list))\
    -        and isinstance(out[0], (bytes, unicode)):
    +        if isinstance(out, (tuple, list)) and isinstance(out[0], (bytes, unicode)):
                 out = out[0][0:0].join(out)  # b'abc'[0:0] -> b''
             # Encode unicode strings
             if isinstance(out, unicode):
    @@ -1052,8 +1119,7 @@ def _cast(self, out, peek=None):
             # TODO: Handle these explicitly in handle() or make them iterable.
             if isinstance(out, HTTPError):
                 out.apply(response)
    -            out = self.error_handler.get(out.status_code,
    -                                         self.default_error_handler)(out)
    +            out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
                 return self._cast(out)
             if isinstance(out, HTTPResponse):
                 out.apply(response)
    @@ -1079,7 +1145,8 @@ def _cast(self, out, peek=None):
             except (KeyboardInterrupt, SystemExit, MemoryError):
                 raise
             except Exception as error:
    -            if not self.catchall: raise
    +            if not self.catchall:
    +                raise
                 first = HTTPError(500, 'Unhandled exception', error, format_exc())
     
             # These are the inner types allowed in iterator or generator objects.
    @@ -1098,13 +1165,13 @@ def _cast(self, out, peek=None):
             return new_iter
     
         def wsgi(self, environ, start_response):
    -        """ The bottle WSGI-interface. """
    +        """The bottle WSGI-interface."""
             try:
                 out = self._cast(self._handle(environ))
                 # rfc2616 section 4.3
    -            if response._status_code in (100, 101, 204, 304)\
    -            or environ['REQUEST_METHOD'] == 'HEAD':
    -                if hasattr(out, 'close'): out.close()
    +            if response._status_code in (100, 101, 204, 304) or environ['REQUEST_METHOD'] == 'HEAD':
    +                if hasattr(out, 'close'):
    +                    out.close()
                     out = []
                 exc_info = environ.get('bottle.exc_info')
                 if exc_info is not None:
    @@ -1114,13 +1181,14 @@ def wsgi(self, environ, start_response):
             except (KeyboardInterrupt, SystemExit, MemoryError):
                 raise
             except Exception as E:
    -            if not self.catchall: raise
    -            err = '

    Critical error while processing request: %s

    ' \ - % html_escape(environ.get('PATH_INFO', '/')) + if not self.catchall: + raise + err = '

    Critical error while processing request: %s

    ' % html_escape(environ.get('PATH_INFO', '/')) if DEBUG: - err += '

    Error:

    \n
    \n%s\n
    \n' \ - '

    Traceback:

    \n
    \n%s\n
    \n' \ - % (html_escape(repr(E)), html_escape(format_exc())) + err += '

    Error:

    \n
    \n%s\n
    \n

    Traceback:

    \n
    \n%s\n
    \n' % ( + html_escape(repr(E)), + html_escape(format_exc()), + ) environ['wsgi.errors'].write(err) environ['wsgi.errors'].flush() headers = [('Content-Type', 'text/html; charset=UTF-8')] @@ -1128,11 +1196,11 @@ def wsgi(self, environ, start_response): return [tob(err)] def __call__(self, environ, start_response): - """ Each instance of :class:'Bottle' is a WSGI application. """ + """Each instance of :class:'Bottle' is a WSGI application.""" return self.wsgi(environ, start_response) def __enter__(self): - """ Use this application as default for all module-level shortcuts. """ + """Use this application as default for all module-level shortcuts.""" default_app.push(self) return self @@ -1141,30 +1209,31 @@ def __exit__(self, exc_type, exc_value, traceback): def __setattr__(self, name, value): if name in self.__dict__: - raise AttributeError("Attribute %s already defined. Plugin conflict?" % name) + raise AttributeError('Attribute %s already defined. Plugin conflict?' % name) object.__setattr__(self, name, value) + ############################################################################### # HTTP and WSGI Tools ########################################################## ############################################################################### class BaseRequest(object): - """ A wrapper for WSGI environment dictionaries that adds a lot of - convenient access methods and properties. Most of them are read-only. + """A wrapper for WSGI environment dictionaries that adds a lot of + convenient access methods and properties. Most of them are read-only. - Adding new attributes to a request actually adds them to the environ - dictionary (as 'bottle.request.ext.'). This is the recommended - way to store and access request-specific data. + Adding new attributes to a request actually adds them to the environ + dictionary (as 'bottle.request.ext.'). This is the recommended + way to store and access request-specific data. 
""" - __slots__ = ('environ', ) + __slots__ = ('environ',) #: Maximum size of memory buffer for :attr:`body` in bytes. MEMFILE_MAX = 102400 def __init__(self, environ=None): - """ Wrap a WSGI environ dictionary. """ + """Wrap a WSGI environ dictionary.""" #: The wrapped WSGI environ dictionary. This is the only real attribute. #: All other attributes actually are read-only properties. self.environ = {} if environ is None else environ @@ -1172,52 +1241,52 @@ def __init__(self, environ=None): @DictProperty('environ', 'bottle.app', read_only=True) def app(self): - """ Bottle application handling this request. """ + """Bottle application handling this request.""" raise RuntimeError('This request is not connected to an application.') @DictProperty('environ', 'bottle.route', read_only=True) def route(self): - """ The bottle :class:`Route` object that matches this request. """ + """The bottle :class:`Route` object that matches this request.""" raise RuntimeError('This request is not connected to a route.') @DictProperty('environ', 'route.url_args', read_only=True) def url_args(self): - """ The arguments extracted from the URL. """ + """The arguments extracted from the URL.""" raise RuntimeError('This request is not connected to a route.') @property def path(self): - """ The value of ``PATH_INFO`` with exactly one prefixed slash (to fix - broken clients and avoid the "empty path" edge case). """ + """The value of ``PATH_INFO`` with exactly one prefixed slash (to fix + broken clients and avoid the "empty path" edge case).""" return '/' + self.environ.get('PATH_INFO', '').lstrip('/') @property def method(self): - """ The ``REQUEST_METHOD`` value as an uppercase string. """ + """The ``REQUEST_METHOD`` value as an uppercase string.""" return self.environ.get('REQUEST_METHOD', 'GET').upper() @DictProperty('environ', 'bottle.request.headers', read_only=True) def headers(self): - """ A :class:`WSGIHeaderDict` that provides case-insensitive access to - HTTP request headers. 
""" + """A :class:`WSGIHeaderDict` that provides case-insensitive access to + HTTP request headers.""" return WSGIHeaderDict(self.environ) def get_header(self, name, default=None): - """ Return the value of a request header, or a given default value. """ + """Return the value of a request header, or a given default value.""" return self.headers.get(name, default) @DictProperty('environ', 'bottle.request.cookies', read_only=True) def cookies(self): - """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT - decoded. Use :meth:`get_cookie` if you expect signed cookies. """ + """Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT + decoded. Use :meth:`get_cookie` if you expect signed cookies.""" cookies = SimpleCookie(self.environ.get('HTTP_COOKIE', '')).values() return FormsDict((c.key, c.value) for c in cookies) def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256): - """ Return the content of a cookie. To read a `Signed Cookie`, the - `secret` must match the one used to create the cookie (see - :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing - cookie or wrong signature), return a default value. """ + """Return the content of a cookie. To read a `Signed Cookie`, the + `secret` must match the one used to create the cookie (see + :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing + cookie or wrong signature), return a default value.""" value = self.cookies.get(key) if secret: # See BaseResponse.set_cookie for details on signed cookies. @@ -1233,10 +1302,10 @@ def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256): @DictProperty('environ', 'bottle.request.query', read_only=True) def query(self): - """ The :attr:`query_string` parsed into a :class:`FormsDict`. These - values are sometimes called "URL arguments" or "GET parameters", but - not to be confused with "URL wildcards" as they are provided by the - :class:`Router`. 
""" + """The :attr:`query_string` parsed into a :class:`FormsDict`. These + values are sometimes called "URL arguments" or "GET parameters", but + not to be confused with "URL wildcards" as they are provided by the + :class:`Router`.""" get = self.environ['bottle.get'] = FormsDict() pairs = _parse_qsl(self.environ.get('QUERY_STRING', '')) for key, value in pairs: @@ -1245,10 +1314,10 @@ def query(self): @DictProperty('environ', 'bottle.request.forms', read_only=True) def forms(self): - """ Form values parsed from an `url-encoded` or `multipart/form-data` - encoded POST or PUT request body. The result is returned as a - :class:`FormsDict`. All keys and values are strings. File uploads - are stored separately in :attr:`files`. """ + """Form values parsed from an `url-encoded` or `multipart/form-data` + encoded POST or PUT request body. The result is returned as a + :class:`FormsDict`. All keys and values are strings. File uploads + are stored separately in :attr:`files`.""" forms = FormsDict() forms.recode_unicode = self.POST.recode_unicode for name, item in self.POST.allitems(): @@ -1258,8 +1327,8 @@ def forms(self): @DictProperty('environ', 'bottle.request.params', read_only=True) def params(self): - """ A :class:`FormsDict` with the combined values of :attr:`query` and - :attr:`forms`. File uploads are stored in :attr:`files`. """ + """A :class:`FormsDict` with the combined values of :attr:`query` and + :attr:`forms`. File uploads are stored in :attr:`files`.""" params = FormsDict() for key, value in self.query.allitems(): params[key] = value @@ -1269,8 +1338,8 @@ def params(self): @DictProperty('environ', 'bottle.request.files', read_only=True) def files(self): - """ File uploads parsed from `multipart/form-data` encoded POST or PUT - request body. The values are instances of :class:`FileUpload`. + """File uploads parsed from `multipart/form-data` encoded POST or PUT + request body. The values are instances of :class:`FileUpload`. 
""" files = FormsDict() @@ -1282,11 +1351,11 @@ def files(self): @DictProperty('environ', 'bottle.request.json', read_only=True) def json(self): - """ If the ``Content-Type`` header is ``application/json`` or - ``application/json-rpc``, this property holds the parsed content - of the request body. Only requests smaller than :attr:`MEMFILE_MAX` - are processed to avoid memory exhaustion. - Invalid JSON raises a 400 error response. + """If the ``Content-Type`` header is ``application/json`` or + ``application/json-rpc``, this property holds the parsed content + of the request body. Only requests smaller than :attr:`MEMFILE_MAX` + are processed to avoid memory exhaustion. + Invalid JSON raises a 400 error response. """ ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0] if ctype in ('application/json', 'application/json-rpc'): @@ -1303,7 +1372,8 @@ def _iter_body(self, read, bufsize): maxread = max(0, self.content_length) while maxread: part = read(min(maxread, bufsize)) - if not part: break + if not part: + break yield part maxread -= len(part) @@ -1316,20 +1386,24 @@ def _iter_chunked(read, bufsize): while header[-2:] != rn: c = read(1) header += c - if not c: raise err - if len(header) > bufsize: raise err + if not c: + raise err + if len(header) > bufsize: + raise err size, _, _ = header.partition(sem) try: maxread = int(tonat(size.strip()), 16) except ValueError: raise err - if maxread == 0: break + if maxread == 0: + break buff = bs while maxread > 0: if not buff: buff = read(min(maxread, bufsize)) part, buff = buff[:maxread], buff[maxread:] - if not part: raise err + if not part: + raise err yield part maxread -= len(part) if read(2) != rn: @@ -1357,8 +1431,8 @@ def _body(self): return body def _get_body_string(self, maxread): - """ Read body into a string. Raise HTTPError(413) on requests that are - too large. """ + """Read body into a string. 
Raise HTTPError(413) on requests that are + too large.""" if self.content_length > maxread: raise HTTPError(413, 'Request entity too large') data = self.body.read(maxread + 1) @@ -1368,28 +1442,27 @@ def _get_body_string(self, maxread): @property def body(self): - """ The HTTP request body as a seek-able file-like object. Depending on - :attr:`MEMFILE_MAX`, this is either a temporary file or a - :class:`io.BytesIO` instance. Accessing this property for the first - time reads and replaces the ``wsgi.input`` environ variable. - Subsequent accesses just do a `seek(0)` on the file object. """ + """The HTTP request body as a seek-able file-like object. Depending on + :attr:`MEMFILE_MAX`, this is either a temporary file or a + :class:`io.BytesIO` instance. Accessing this property for the first + time reads and replaces the ``wsgi.input`` environ variable. + Subsequent accesses just do a `seek(0)` on the file object.""" self._body.seek(0) return self._body @property def chunked(self): - """ True if Chunked transfer encoding was. """ - return 'chunked' in self.environ.get( - 'HTTP_TRANSFER_ENCODING', '').lower() + """True if Chunked transfer encoding was.""" + return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower() #: An alias for :attr:`query`. GET = query @DictProperty('environ', 'bottle.request.post', read_only=True) def POST(self): - """ The values of :attr:`forms` and :attr:`files` combined into a single - :class:`FormsDict`. Values are either strings (form values) or - instances of :class:`FileUpload`. + """The values of :attr:`forms` and :attr:`files` combined into a single + :class:`FormsDict`. Values are either strings (form values) or + instances of :class:`FileUpload`. 
""" post = FormsDict() content_type = self.environ.get('CONTENT_TYPE', '') @@ -1403,40 +1476,43 @@ def POST(self): return post post.recode_unicode = False - charset = options.get("charset", "utf8") - boundary = options.get("boundary") + charset = options.get('charset', 'utf8') + boundary = options.get('boundary') if not boundary: - raise MultipartError("Invalid content type header, missing boundary") - parser = _MultipartParser(self.body, boundary, self.content_length, - mem_limit=self.MEMFILE_MAX, memfile_limit=self.MEMFILE_MAX, - charset=charset) + raise MultipartError('Invalid content type header, missing boundary') + parser = _MultipartParser( + self.body, + boundary, + self.content_length, + mem_limit=self.MEMFILE_MAX, + memfile_limit=self.MEMFILE_MAX, + charset=charset, + ) for part in parser.parse(): if not part.filename and part.is_buffered(): post[part.name] = tonat(part.value, 'utf8') else: - post[part.name] = FileUpload(part.file, part.name, - part.filename, part.headerlist) + post[part.name] = FileUpload(part.file, part.name, part.filename, part.headerlist) return post @property def url(self): - """ The full request URI including hostname and scheme. If your app - lives behind a reverse proxy or load balancer and you get confusing - results, make sure that the ``X-Forwarded-Host`` header is set - correctly. """ + """The full request URI including hostname and scheme. If your app + lives behind a reverse proxy or load balancer and you get confusing + results, make sure that the ``X-Forwarded-Host`` header is set + correctly.""" return self.urlparts.geturl() @DictProperty('environ', 'bottle.request.urlparts', read_only=True) def urlparts(self): - """ The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. - The tuple contains (scheme, host, path, query_string and fragment), - but the fragment is always empty because it is not visible to the - server. """ + """The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. 
+ The tuple contains (scheme, host, path, query_string and fragment), + but the fragment is always empty because it is not visible to the + server.""" env = self.environ - http = env.get('HTTP_X_FORWARDED_PROTO') \ - or env.get('wsgi.url_scheme', 'http') + http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http') host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') if not host: # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. @@ -1449,93 +1525,96 @@ def urlparts(self): @property def fullpath(self): - """ Request path including :attr:`script_name` (if present). """ + """Request path including :attr:`script_name` (if present).""" return urljoin(self.script_name, self.path.lstrip('/')) @property def query_string(self): - """ The raw :attr:`query` part of the URL (everything in between ``?`` - and ``#``) as a string. """ + """The raw :attr:`query` part of the URL (everything in between ``?`` + and ``#``) as a string.""" return self.environ.get('QUERY_STRING', '') @property def script_name(self): - """ The initial portion of the URL's `path` that was removed by a higher - level (server or routing middleware) before the application was - called. This script path is returned with leading and tailing - slashes. """ + """The initial portion of the URL's `path` that was removed by a higher + level (server or routing middleware) before the application was + called. This script path is returned with leading and tailing + slashes.""" script_name = self.environ.get('SCRIPT_NAME', '').strip('/') return '/' + script_name + '/' if script_name else '/' def path_shift(self, shift=1): - """ Shift path segments from :attr:`path` to :attr:`script_name` and - vice versa. + """Shift path segments from :attr:`path` to :attr:`script_name` and + vice versa. - :param shift: The number of path segments to shift. May be negative - to change the shift direction. (default: 1) + :param shift: The number of path segments to shift. 
May be negative + to change the shift direction. (default: 1) """ script, path = path_shift(self.environ.get('SCRIPT_NAME', '/'), self.path, shift) self['SCRIPT_NAME'], self['PATH_INFO'] = script, path @property def content_length(self): - """ The request body length as an integer. The client is responsible to - set this header. Otherwise, the real length of the body is unknown - and -1 is returned. In this case, :attr:`body` will be empty. """ + """The request body length as an integer. The client is responsible to + set this header. Otherwise, the real length of the body is unknown + and -1 is returned. In this case, :attr:`body` will be empty.""" return int(self.environ.get('CONTENT_LENGTH') or -1) @property def content_type(self): - """ The Content-Type header as a lowercase-string (default: empty). """ + """The Content-Type header as a lowercase-string (default: empty).""" return self.environ.get('CONTENT_TYPE', '').lower() @property def is_xhr(self): - """ True if the request was triggered by a XMLHttpRequest. This only - works with JavaScript libraries that support the `X-Requested-With` - header (most of the popular libraries do). """ + """True if the request was triggered by a XMLHttpRequest. This only + works with JavaScript libraries that support the `X-Requested-With` + header (most of the popular libraries do).""" requested_with = self.environ.get('HTTP_X_REQUESTED_WITH', '') return requested_with.lower() == 'xmlhttprequest' @property def is_ajax(self): - """ Alias for :attr:`is_xhr`. "Ajax" is not the right term. """ + """Alias for :attr:`is_xhr`. "Ajax" is not the right term.""" return self.is_xhr @property def auth(self): - """ HTTP authentication data as a (user, password) tuple. This - implementation currently supports basic (not digest) authentication - only. If the authentication happened at a higher level (e.g. 
in the - front web-server or a middleware), the password field is None, but - the user field is looked up from the ``REMOTE_USER`` environ - variable. On any errors, None is returned. """ + """HTTP authentication data as a (user, password) tuple. This + implementation currently supports basic (not digest) authentication + only. If the authentication happened at a higher level (e.g. in the + front web-server or a middleware), the password field is None, but + the user field is looked up from the ``REMOTE_USER`` environ + variable. On any errors, None is returned.""" basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION', '')) - if basic: return basic + if basic: + return basic ruser = self.environ.get('REMOTE_USER') - if ruser: return (ruser, None) + if ruser: + return (ruser, None) return None @property def remote_route(self): - """ A list of all IPs that were involved in this request, starting with - the client IP and followed by zero or more proxies. This does only - work if all proxies support the ```X-Forwarded-For`` header. Note - that this information can be forged by malicious clients. """ + """A list of all IPs that were involved in this request, starting with + the client IP and followed by zero or more proxies. This does only + work if all proxies support the ```X-Forwarded-For`` header. Note + that this information can be forged by malicious clients.""" proxy = self.environ.get('HTTP_X_FORWARDED_FOR') - if proxy: return [ip.strip() for ip in proxy.split(',')] + if proxy: + return [ip.strip() for ip in proxy.split(',')] remote = self.environ.get('REMOTE_ADDR') return [remote] if remote else [] @property def remote_addr(self): - """ The client IP as a string. Note that this information can be forged - by malicious clients. """ + """The client IP as a string. 
Note that this information can be forged + by malicious clients.""" route = self.remote_route return route[0] if route else None def copy(self): - """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """ + """Return a new :class:`Request` with a shallow :attr:`environ` copy.""" return Request(self.environ.copy()) def get(self, value, default=None): @@ -1545,8 +1624,8 @@ def __getitem__(self, key): return self.environ[key] def __delitem__(self, key): - self[key] = "" - del (self.environ[key]) + self[key] = '' + del self.environ[key] def __iter__(self): return iter(self.environ) @@ -1558,7 +1637,7 @@ def keys(self): return self.environ.keys() def __setitem__(self, key, value): - """ Change an environ value and clear all caches that depend on it. """ + """Change an environ value and clear all caches that depend on it.""" if self.environ.get('bottle.request.readonly'): raise KeyError('The environ dictionary is read-only.') @@ -1580,7 +1659,7 @@ def __repr__(self): return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url) def __getattr__(self, name): - """ Search in self.environ for additional user defined attributes. """ + """Search in self.environ for additional user defined attributes.""" try: var = self.environ['bottle.request.ext.%s' % name] return var.__get__(self) if hasattr(var, '__get__') else var @@ -1588,30 +1667,31 @@ def __getattr__(self, name): raise AttributeError('Attribute %r not defined.' % name) def __setattr__(self, name, value): - """ Define new attributes that are local to the bound request environment. 
""" - if name == 'environ': return object.__setattr__(self, name, value) + """Define new attributes that are local to the bound request environment.""" + if name == 'environ': + return object.__setattr__(self, name, value) key = 'bottle.request.ext.%s' % name if hasattr(self, name): - raise AttributeError("Attribute already defined: %s" % name) + raise AttributeError('Attribute already defined: %s' % name) self.environ[key] = value def __delattr__(self, name): try: del self.environ['bottle.request.ext.%s' % name] except KeyError: - raise AttributeError("Attribute not defined: %s" % name) + raise AttributeError('Attribute not defined: %s' % name) def _hkey(key): if '\n' in key or '\r' in key or '\0' in key: - raise ValueError("Header names must not contain control characters: %r" % key) + raise ValueError('Header names must not contain control characters: %r' % key) return key.title().replace('_', '-') def _hval(value): value = tonat(value) if '\n' in value or '\r' in value or '\0' in value: - raise ValueError("Header value must not contain control characters: %r" % value) + raise ValueError('Header value must not contain control characters: %r' % value) return value @@ -1622,7 +1702,8 @@ def __init__(self, name, reader=None, writer=None, default=''): self.__doc__ = 'Current value of the %r header.' % name.title() def __get__(self, obj, _): - if obj is None: return self + if obj is None: + return self value = obj.get_header(self.name, self.default) return self.reader(value) if self.reader else value @@ -1634,11 +1715,11 @@ def __delete__(self, obj): class BaseResponse(object): - """ Storage class for a response body as well as headers and cookies. + """Storage class for a response body as well as headers and cookies. - This class does support dict-like case-insensitive item-access to - headers, but is NOT a dict. Most notably, iterating over a response - yields parts of the body and not the headers. 
+ This class does support dict-like case-insensitive item-access to + headers, but is NOT a dict. Most notably, iterating over a response + yields parts of the body and not the headers. """ default_status = 200 @@ -1648,13 +1729,22 @@ class BaseResponse(object): # (rfc2616 section 10.2.3 and 10.3.5) bad_headers = { 204: frozenset(('Content-Type', 'Content-Length')), - 304: frozenset(('Allow', 'Content-Encoding', 'Content-Language', - 'Content-Length', 'Content-Range', 'Content-Type', - 'Content-Md5', 'Last-Modified')) + 304: frozenset( + ( + 'Allow', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Range', + 'Content-Type', + 'Content-Md5', + 'Last-Modified', + ) + ), } def __init__(self, body='', status=None, headers=None, **more_headers): - """ Create a new response object. + """Create a new response object. :param body: The response body as one of the supported types. :param status: Either an HTTP status code (e.g. 200) or a status line @@ -1678,7 +1768,7 @@ def __init__(self, body='', status=None, headers=None, **more_headers): self.add_header(name, value) def copy(self, cls=None): - """ Returns a copy of self. """ + """Returns a copy of self.""" cls = cls or BaseResponse assert issubclass(cls, BaseResponse) copy = cls() @@ -1686,9 +1776,9 @@ def copy(self, cls=None): copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) if self._cookies: cookies = copy._cookies = SimpleCookie() - for k,v in self._cookies.items(): + for k, v in self._cookies.items(): cookies[k] = v.value - cookies[k].update(v) # also copy cookie attributes + cookies[k].update(v) # also copy cookie attributes return copy def __iter__(self): @@ -1700,12 +1790,12 @@ def close(self): @property def status_line(self): - """ The HTTP status line as a string (e.g. ``404 Not Found``).""" + """The HTTP status line as a string (e.g. ``404 Not Found``).""" return self._status_line @property def status_code(self): - """ The HTTP status code as an integer (e.g. 
404).""" + """The HTTP status code as an integer (e.g. 404).""" return self._status_code def _set_status(self, status): @@ -1727,18 +1817,21 @@ def _get_status(self): return self._status_line status = property( - _get_status, _set_status, None, - ''' A writeable property to change the HTTP response status. It accepts + _get_status, + _set_status, + None, + """ A writeable property to change the HTTP response status. It accepts either a numeric code (100-999) or a string with a custom reason phrase (e.g. "404 Brain not found"). Both :data:`status_line` and :data:`status_code` are updated accordingly. The return value is - always a status string. ''') + always a status string. """, + ) del _get_status, _set_status @property def headers(self): - """ An instance of :class:`HeaderDict`, a case-insensitive dict-like - view on the response headers. """ + """An instance of :class:`HeaderDict`, a case-insensitive dict-like + view on the response headers.""" hdict = HeaderDict() hdict.dict = self._headers return hdict @@ -1756,33 +1849,33 @@ def __setitem__(self, name, value): self._headers[_hkey(name)] = [_hval(value)] def get_header(self, name, default=None): - """ Return the value of a previously defined header. If there is no - header with that name, return a default value. """ + """Return the value of a previously defined header. If there is no + header with that name, return a default value.""" return self._headers.get(_hkey(name), [default])[-1] def set_header(self, name, value): - """ Create a new response header, replacing any previously defined - headers with the same name. """ + """Create a new response header, replacing any previously defined + headers with the same name.""" self._headers[_hkey(name)] = [_hval(value)] def add_header(self, name, value): - """ Add an additional response header, not removing duplicates. 
""" + """Add an additional response header, not removing duplicates.""" self._headers.setdefault(_hkey(name), []).append(_hval(value)) def iter_headers(self): - """ Yield (header, value) tuples, skipping headers that are not - allowed with the current response status code. """ + """Yield (header, value) tuples, skipping headers that are not + allowed with the current response status code.""" return self.headerlist def _wsgi_status_line(self): - """ WSGI conform status line (latin1-encodeable) """ + """WSGI conform status line (latin1-encodeable)""" if py3k: return self._status_line.encode('utf8').decode('latin1') return self._status_line @property def headerlist(self): - """ WSGI conform list of (header, value) tuples. """ + """WSGI conform list of (header, value) tuples.""" out = [] headers = list(self._headers.items()) if 'Content-Type' not in self._headers: @@ -1801,56 +1894,55 @@ def headerlist(self): content_type = HeaderProperty('Content-Type') content_length = HeaderProperty('Content-Length', reader=int, default=-1) expires = HeaderProperty( - 'Expires', - reader=lambda x: datetime.fromtimestamp(parse_date(x), UTC), - writer=lambda x: http_date(x)) + 'Expires', reader=lambda x: datetime.fromtimestamp(parse_date(x), UTC), writer=lambda x: http_date(x) + ) @property def charset(self, default='UTF-8'): - """ Return the charset specified in the content-type header (default: utf8). """ + """Return the charset specified in the content-type header (default: utf8).""" if 'charset=' in self.content_type: return self.content_type.split('charset=')[-1].split(';')[0].strip() return default def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **options): - """ Create a new cookie or replace an old one. If the `secret` parameter is - set, create a `Signed Cookie` (described below). - - :param name: the name of the cookie. - :param value: the value of the cookie. - :param secret: a signature key required for signed cookies. 
- - Additionally, this method accepts all RFC 2109 attributes that are - supported by :class:`cookie.Morsel`, including: - - :param maxage: maximum age in seconds. (default: None) - :param expires: a datetime object or UNIX timestamp. (default: None) - :param domain: the domain that is allowed to read the cookie. - (default: current domain) - :param path: limits the cookie to a given path (default: current path) - :param secure: limit the cookie to HTTPS connections (default: off). - :param httponly: prevents client-side javascript to read this cookie - (default: off, requires Python 2.6 or newer). - :param samesite: Control or disable third-party use for this cookie. - Possible values: `lax`, `strict` or `none` (default). - - If neither `expires` nor `maxage` is set (default), the cookie will - expire at the end of the browser session (as soon as the browser - window is closed). - - Signed cookies may store any pickle-able object and are - cryptographically signed to prevent manipulation. Keep in mind that - cookies are limited to 4kb in most browsers. - - Warning: Pickle is a potentially dangerous format. If an attacker - gains access to the secret key, he could forge cookies that execute - code on server side if unpickled. Using pickle is discouraged and - support for it will be removed in later versions of bottle. - - Warning: Signed cookies are not encrypted (the client can still see - the content) and not copy-protected (the client can restore an old - cookie). The main intention is to make pickling and unpickling - save, not to store secret information at client side. + """Create a new cookie or replace an old one. If the `secret` parameter is + set, create a `Signed Cookie` (described below). + + :param name: the name of the cookie. + :param value: the value of the cookie. + :param secret: a signature key required for signed cookies. 
+ + Additionally, this method accepts all RFC 2109 attributes that are + supported by :class:`cookie.Morsel`, including: + + :param maxage: maximum age in seconds. (default: None) + :param expires: a datetime object or UNIX timestamp. (default: None) + :param domain: the domain that is allowed to read the cookie. + (default: current domain) + :param path: limits the cookie to a given path (default: current path) + :param secure: limit the cookie to HTTPS connections (default: off). + :param httponly: prevents client-side javascript to read this cookie + (default: off, requires Python 2.6 or newer). + :param samesite: Control or disable third-party use for this cookie. + Possible values: `lax`, `strict` or `none` (default). + + If neither `expires` nor `maxage` is set (default), the cookie will + expire at the end of the browser session (as soon as the browser + window is closed). + + Signed cookies may store any pickle-able object and are + cryptographically signed to prevent manipulation. Keep in mind that + cookies are limited to 4kb in most browsers. + + Warning: Pickle is a potentially dangerous format. If an attacker + gains access to the secret key, he could forge cookies that execute + code on server side if unpickled. Using pickle is discouraged and + support for it will be removed in later versions of bottle. + + Warning: Signed cookies are not encrypted (the client can still see + the content) and not copy-protected (the client can restore an old + cookie). The main intention is to make pickling and unpickling + save, not to store secret information at client side. """ if not self._cookies: self._cookies = SimpleCookie() @@ -1862,12 +1954,14 @@ def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **optio if secret: if not isinstance(value, basestring): - depr(0, 13, "Pickling of arbitrary objects into cookies is " - "deprecated.", "Only store strings in cookies. 
" - "JSON strings are fine, too.") + depr( + 0, + 13, + 'Pickling of arbitrary objects into cookies is deprecated.', + 'Only store strings in cookies. JSON strings are fine, too.', + ) encoded = base64.b64encode(pickle.dumps([name, value], -1)) - sig = base64.b64encode(hmac.new(tob(secret), encoded, - digestmod=digestmod).digest()) + sig = base64.b64encode(hmac.new(tob(secret), encoded, digestmod=digestmod).digest()) value = touni(tob('!') + sig + tob('?') + encoded) elif not isinstance(value, basestring): raise TypeError('Secret key required for non-string cookies.') @@ -1879,23 +1973,23 @@ def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **optio self._cookies[name] = value for key, value in options.items(): - if key in ('max_age', 'maxage'): # 'maxage' variant added in 0.13 + if key in ('max_age', 'maxage'): # 'maxage' variant added in 0.13 key = 'max-age' if isinstance(value, timedelta): value = value.seconds + value.days * 24 * 3600 if key == 'expires': value = http_date(value) - if key in ('same_site', 'samesite'): # 'samesite' variant added in 0.13 - key, value = 'samesite', (value or "none").lower() + if key in ('same_site', 'samesite'): # 'samesite' variant added in 0.13 + key, value = 'samesite', (value or 'none').lower() if value not in ('lax', 'strict', 'none'): - raise CookieError("Invalid value for SameSite") + raise CookieError('Invalid value for SameSite') if key in ('secure', 'httponly') and not value: continue self._cookies[name][key] = value def delete_cookie(self, key, **kwargs): - """ Delete a cookie. Be sure to use the same `domain` and `path` - settings as used to create the cookie. """ + """Delete a cookie. 
Be sure to use the same `domain` and `path` + settings as used to create the cookie.""" kwargs['max_age'] = -1 kwargs['expires'] = 0 self.set_cookie(key, '', **kwargs) @@ -1914,7 +2008,7 @@ def fget(_): try: return ls.var except AttributeError: - raise RuntimeError("Request context not initialized.") + raise RuntimeError('Request context not initialized.') def fset(_, value): ls.var = value @@ -1926,21 +2020,23 @@ def fdel(_): class LocalRequest(BaseRequest): - """ A thread-local subclass of :class:`BaseRequest` with a different - set of attributes for each thread. There is usually only one global - instance of this class (:data:`request`). If accessed during a - request/response cycle, this instance always refers to the *current* - request (even on a multithreaded server). """ + """A thread-local subclass of :class:`BaseRequest` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`request`). If accessed during a + request/response cycle, this instance always refers to the *current* + request (even on a multithreaded server).""" + bind = BaseRequest.__init__ environ = _local_property() class LocalResponse(BaseResponse): - """ A thread-local subclass of :class:`BaseResponse` with a different - set of attributes for each thread. There is usually only one global - instance of this class (:data:`response`). Its attributes are used - to build the HTTP response at the end of the request/response cycle. + """A thread-local subclass of :class:`BaseResponse` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`response`). Its attributes are used + to build the HTTP response at the end of the request/response cycle. 
""" + bind = BaseResponse.__init__ _status_line = _local_property() _status_code = _local_property() @@ -1954,18 +2050,18 @@ class LocalResponse(BaseResponse): class HTTPResponse(Response, BottleException): - """ A subclass of :class:`Response` that can be raised or returned from request - handlers to short-curcuit request processing and override changes made to the - global :data:`request` object. This bypasses error handlers, even if the status - code indicates an error. Return or raise :class:`HTTPError` to trigger error - handlers. + """A subclass of :class:`Response` that can be raised or returned from request + handlers to short-curcuit request processing and override changes made to the + global :data:`request` object. This bypasses error handlers, even if the status + code indicates an error. Return or raise :class:`HTTPError` to trigger error + handlers. """ def __init__(self, body='', status=None, headers=None, **more_headers): super(HTTPResponse, self).__init__(body, status, headers, **more_headers) def apply(self, other): - """ Copy the state of this response to a different :class:`Response` object. """ + """Copy the state of this response to a different :class:`Response` object.""" other._status_code = self._status_code other._status_line = self._status_line other._headers = self._headers @@ -1974,19 +2070,16 @@ def apply(self, other): class HTTPError(HTTPResponse): - """ A subclass of :class:`HTTPResponse` that triggers error handlers. 
""" + """A subclass of :class:`HTTPResponse` that triggers error handlers.""" default_status = 500 - def __init__(self, - status=None, - body=None, - exception=None, - traceback=None, **more_headers): + def __init__(self, status=None, body=None, exception=None, traceback=None, **more_headers): self.exception = exception self.traceback = traceback super(HTTPError, self).__init__(body, status, **more_headers) + ############################################################################### # Plugins ###################################################################### ############################################################################### @@ -2004,20 +2097,25 @@ def __init__(self, json_dumps=json_dumps): self.json_dumps = json_dumps def setup(self, app): - app.config._define('json.enable', default=True, validate=bool, - help="Enable or disable automatic dict->json filter.") - app.config._define('json.ascii', default=False, validate=bool, - help="Use only 7-bit ASCII characters in output.") - app.config._define('json.indent', default=True, validate=bool, - help="Add whitespace to make json more readable.") - app.config._define('json.dump_func', default=None, - help="If defined, use this function to transform" - " dict into json. The other options no longer" - " apply.") + app.config._define( + 'json.enable', default=True, validate=bool, help='Enable or disable automatic dict->json filter.' + ) + app.config._define( + 'json.ascii', default=False, validate=bool, help='Use only 7-bit ASCII characters in output.' + ) + app.config._define( + 'json.indent', default=True, validate=bool, help='Add whitespace to make json more readable.' + ) + app.config._define( + 'json.dump_func', + default=None, + help='If defined, use this function to transform dict into json. 
The other options no longer apply.', + ) def apply(self, callback, route): dumps = self.json_dumps - if not self.json_dumps: return callback + if not self.json_dumps: + return callback @functools.wraps(callback) def wrapper(*a, **ka): @@ -2027,9 +2125,9 @@ def wrapper(*a, **ka): rv = resp if isinstance(rv, dict): - #Attempt to serialize, raises exception on failure + # Attempt to serialize, raises exception on failure json_response = dumps(rv) - #Set content type only if serialization successful + # Set content type only if serialization successful response.content_type = 'application/json' return json_response elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict): @@ -2041,10 +2139,11 @@ def wrapper(*a, **ka): class TemplatePlugin(object): - """ This plugin applies the :func:`view` decorator to all routes with a - `template` config parameter. If the parameter is a tuple, the second - element must be a dict with additional options (e.g. `template_engine`) - or default variables for the template. """ + """This plugin applies the :func:`view` decorator to all routes with a + `template` config parameter. If the parameter is a tuple, the second + element must be a dict with additional options (e.g. `template_engine`) + or default variables for the template.""" + name = 'template' api = 2 @@ -2064,37 +2163,38 @@ def apply(self, callback, route): #: Not a plugin, but part of the plugin API. TODO: Find a better place. class _ImportRedirect(object): def __init__(self, name, impmask): - """ Create a virtual package that redirects imports (see PEP 302). 
""" + """Create a virtual package that redirects imports (see PEP 302).""" self.name = name self.impmask = impmask self.module = sys.modules.setdefault(name, new_module(name)) - self.module.__dict__.update({ - '__file__': __file__, - '__path__': [], - '__all__': [], - '__loader__': self - }) + self.module.__dict__.update({'__file__': __file__, '__path__': [], '__all__': [], '__loader__': self}) sys.meta_path.append(self) def find_spec(self, fullname, path, target=None): - if '.' not in fullname: return - if fullname.rsplit('.', 1)[0] != self.name: return + if '.' not in fullname: + return + if fullname.rsplit('.', 1)[0] != self.name: + return from importlib.util import spec_from_loader + return spec_from_loader(fullname, self) def find_module(self, fullname, path=None): - if '.' not in fullname: return - if fullname.rsplit('.', 1)[0] != self.name: return + if '.' not in fullname: + return + if fullname.rsplit('.', 1)[0] != self.name: + return return self def create_module(self, spec): return self.load_module(spec.name) def exec_module(self, module): - pass # This probably breaks importlib.reload() :/ + pass # This probably breaks importlib.reload() :/ def load_module(self, fullname): - if fullname in sys.modules: return sys.modules[fullname] + if fullname in sys.modules: + return sys.modules[fullname] modname = fullname.rsplit('.', 1)[1] realname = self.impmask % modname __import__(realname) @@ -2103,15 +2203,16 @@ def load_module(self, fullname): module.__loader__ = self return module + ############################################################################### # Common Utilities ############################################################# ############################################################################### class MultiDict(DictMixin): - """ This dict stores multiple values per key, but behaves exactly like a - normal dict in that it returns only the newest value for any given key. 
- There are special methods available to access the full list of values. + """This dict stores multiple values per key, but behaves exactly like a + normal dict in that it returns only the newest value for any given key. + There are special methods available to access the full list of values. """ def __init__(self, *a, **k): @@ -2178,14 +2279,14 @@ def allitems(self): return [(k, v) for k, vl in self.dict.iteritems() for v in vl] def get(self, key, default=None, index=-1, type=None): - """ Return the most recent value for a key. - - :param default: The default value to be returned if the key is not - present or the type conversion fails. - :param index: An index for the list of available values. - :param type: If defined, this callable is used to cast the value - into a specific type. Exception are suppressed and result in - the default value to be returned. + """Return the most recent value for a key. + + :param default: The default value to be returned if the key is not + present or the type conversion fails. + :param index: An index for the list of available values. + :param type: If defined, this callable is used to cast the value + into a specific type. Exception are suppressed and result in + the default value to be returned. """ try: val = self.dict[key][index] @@ -2195,15 +2296,15 @@ def get(self, key, default=None, index=-1, type=None): return default def append(self, key, value): - """ Add a new value to the list of values for this key. """ + """Add a new value to the list of values for this key.""" self.dict.setdefault(key, []).append(value) def replace(self, key, value): - """ Replace the list of values with a single value. """ + """Replace the list of values with a single value.""" self.dict[key] = [value] def getall(self, key): - """ Return a (possibly empty) list of values for a key. 
""" + """Return a (possibly empty) list of values for a key.""" return self.dict.get(key) or [] #: Aliases for WTForms to mimic other multi-dict APIs (Django) @@ -2212,12 +2313,12 @@ def getall(self, key): class FormsDict(MultiDict): - """ This :class:`MultiDict` subclass is used to store request form data. - Additionally to the normal dict-like item access methods (which return - unmodified data as native strings), this container also supports - attribute-like access to its values. Attributes are automatically de- - or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing - attributes default to an empty string. """ + """This :class:`MultiDict` subclass is used to store request form data. + Additionally to the normal dict-like item access methods (which return + unmodified data as native strings), this container also supports + attribute-like access to its values. Attributes are automatically de- + or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing + attributes default to an empty string.""" #: Encoding used for attribute values. input_encoding = 'utf8' @@ -2234,9 +2335,9 @@ def _fix(self, s, encoding=None): return s def decode(self, encoding=None): - """ Returns a copy with all keys and values de- or recoded to match - :attr:`input_encoding`. Some libraries (e.g. WTForms) want a - unicode dictionary. """ + """Returns a copy with all keys and values de- or recoded to match + :attr:`input_encoding`. Some libraries (e.g. WTForms) want a + unicode dictionary.""" copy = FormsDict() enc = copy.input_encoding = encoding or self.input_encoding copy.recode_unicode = False @@ -2245,7 +2346,7 @@ def decode(self, encoding=None): return copy def getunicode(self, name, default=None, encoding=None): - """ Return the value as a unicode string, or the default. 
""" + """Return the value as a unicode string, or the default.""" try: return self._fix(self[name], encoding) except (UnicodeError, KeyError): @@ -2257,13 +2358,15 @@ def __getattr__(self, name, default=unicode()): return super(FormsDict, self).__getattr__(name) return self.getunicode(name, default=default) + class HeaderDict(MultiDict): - """ A case-insensitive version of :class:`MultiDict` that defaults to - replace the old value instead of appending it. """ + """A case-insensitive version of :class:`MultiDict` that defaults to + replace the old value instead of appending it.""" def __init__(self, *a, **ka): self.dict = {} - if a or ka: self.update(*a, **ka) + if a or ka: + self.update(*a, **ka) def __contains__(self, key): return _hkey(key) in self.dict @@ -2296,16 +2399,17 @@ def filter(self, names): class WSGIHeaderDict(DictMixin): - """ This dict-like class wraps a WSGI environ dict and provides convenient - access to HTTP_* fields. Keys and values are native strings - (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI - environment contains non-native string values, these are de- or encoded - using a lossless 'latin1' character set. - - The API will remain stable even on changes to the relevant PEPs. - Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one - that uses non-native strings.) + """This dict-like class wraps a WSGI environ dict and provides convenient + access to HTTP_* fields. Keys and values are native strings + (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI + environment contains non-native string values, these are de- or encoded + using a lossless 'latin1' character set. + + The API will remain stable even on changes to the relevant PEPs. + Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one + that uses non-native strings.) """ + #: List of keys that do not have a ``HTTP_`` prefix. 
cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') @@ -2313,14 +2417,14 @@ def __init__(self, environ): self.environ = environ def _ekey(self, key): - """ Translate header field name to CGI/WSGI environ key. """ + """Translate header field name to CGI/WSGI environ key.""" key = key.replace('-', '_').upper() if key in self.cgikeys: return key return 'HTTP_' + key def raw(self, key, default=None): - """ Return the header value as is (may be bytes or unicode). """ + """Return the header value as is (may be bytes or unicode).""" return self.environ.get(self._ekey(key), default) def __getitem__(self, key): @@ -2333,10 +2437,10 @@ def __getitem__(self, key): return val def __setitem__(self, key, value): - raise TypeError("%s is read-only." % self.__class__) + raise TypeError('%s is read-only.' % self.__class__) def __delitem__(self, key): - raise TypeError("%s is read-only." % self.__class__) + raise TypeError('%s is read-only.' % self.__class__) def __iter__(self): for key in self.environ: @@ -2354,14 +2458,16 @@ def __len__(self): def __contains__(self, key): return self._ekey(key) in self.environ + _UNSET = object() + class ConfigDict(dict): - """ A dict-like configuration storage with additional support for - namespaces, validators, meta-data and overlays. + """A dict-like configuration storage with additional support for + namespaces, validators, meta-data and overlays. - This dict-like class is heavily optimized for read access. - Read-only methods and item access should be as fast as a native dict. + This dict-like class is heavily optimized for read access. + Read-only methods and item access should be as fast as a native dict. """ __slots__ = ('_meta', '_change_listener', '_overlays', '_virtual_keys', '_source', '__weakref__') @@ -2379,16 +2485,15 @@ def __init__(self): def load_module(self, name, squash=True): """Load values from a Python module. - Import a python module by name and add all upper-case module-level - variables to this config dict. 
+ Import a python module by name and add all upper-case module-level + variables to this config dict. - :param name: Module name to import and load. - :param squash: If true (default), nested dicts are assumed to - represent namespaces and flattened (see :meth:`load_dict`). + :param name: Module name to import and load. + :param squash: If true (default), nested dicts are assumed to + represent namespaces and flattened (see :meth:`load_dict`). """ config_obj = load(name) - obj = {key: getattr(config_obj, key) - for key in dir(config_obj) if key.isupper()} + obj = {key: getattr(config_obj, key) for key in dir(config_obj) if key.isupper()} if squash: self.load_dict(obj) @@ -2397,25 +2502,24 @@ def load_module(self, name, squash=True): return self def load_config(self, filename, **options): - """ Load values from ``*.ini`` style config files using configparser. + """Load values from ``*.ini`` style config files using configparser. - INI style sections (e.g. ``[section]``) are used as namespace for - all keys within that section. Both section and key names may contain - dots as namespace separators and are converted to lower-case. + INI style sections (e.g. ``[section]``) are used as namespace for + all keys within that section. Both section and key names may contain + dots as namespace separators and are converted to lower-case. - The special sections ``[bottle]`` and ``[ROOT]`` refer to the root - namespace and the ``[DEFAULT]`` section defines default values for all - other sections. + The special sections ``[bottle]`` and ``[ROOT]`` refer to the root + namespace and the ``[DEFAULT]`` section defines default values for all + other sections. - :param filename: The path of a config file, or a list of paths. - :param options: All keyword parameters are passed to the underlying - :class:`python:configparser.ConfigParser` constructor call. + :param filename: The path of a config file, or a list of paths. 
+ :param options: All keyword parameters are passed to the underlying + :class:`python:configparser.ConfigParser` constructor call. """ options.setdefault('allow_no_value', True) if py3k: - options.setdefault('interpolation', - configparser.ExtendedInterpolation()) + options.setdefault('interpolation', configparser.ExtendedInterpolation()) conf = configparser.ConfigParser(**options) conf.read(filename) for section in conf.sections(): @@ -2427,12 +2531,12 @@ def load_config(self, filename, **options): return self def load_dict(self, source, namespace=''): - """ Load values from a dictionary structure. Nesting can be used to - represent namespaces. + """Load values from a dictionary structure. Nesting can be used to + represent namespaces. - >>> c = ConfigDict() - >>> c.load_dict({'some': {'namespace': {'key': 'value'} } }) - {'some.namespace.key': 'value'} + >>> c = ConfigDict() + >>> c.load_dict({'some': {'namespace': {'key': 'value'} } }) + {'some.namespace.key': 'value'} """ for key, value in source.items(): if isinstance(key, basestring): @@ -2446,11 +2550,11 @@ def load_dict(self, source, namespace=''): return self def update(self, *a, **ka): - """ If the first parameter is a string, all keys are prefixed with this - namespace. Apart from that it works just as the usual dict.update(). + """If the first parameter is a string, all keys are prefixed with this + namespace. Apart from that it works just as the usual dict.update(). 
- >>> c = ConfigDict() - >>> c.update('some.namespace', key='value') + >>> c = ConfigDict() + >>> c.update('some.namespace', key='value') """ prefix = '' if a and isinstance(a[0], basestring): @@ -2484,7 +2588,7 @@ def __delitem__(self, key): if key not in self: raise KeyError(key) if key in self._virtual_keys: - raise KeyError("Virtual keys cannot be deleted: %s" % key) + raise KeyError('Virtual keys cannot be deleted: %s' % key) if self._source and key in self._source: # Not virtual, but present in source -> Restore virtual value @@ -2497,7 +2601,7 @@ def __delitem__(self, key): overlay._delete_virtual(key) def _set_virtual(self, key, value): - """ Recursively set or update virtual keys. """ + """Recursively set or update virtual keys.""" if key in self and key not in self._virtual_keys: return # Do nothing for non-virtual keys. @@ -2509,7 +2613,7 @@ def _set_virtual(self, key, value): overlay._set_virtual(key, value) def _delete_virtual(self, key): - """ Recursively delete virtual entry. """ + """Recursively delete virtual entry.""" if key not in self._virtual_keys: return # Do nothing for non-virtual keys. @@ -2530,22 +2634,22 @@ def _add_change_listener(self, func): return func def meta_get(self, key, metafield, default=None): - """ Return the value of a meta field for a key. """ + """Return the value of a meta field for a key.""" return self._meta.get(key, {}).get(metafield, default) def meta_set(self, key, metafield, value): - """ Set the meta field for a key to a new value. - - Meta-fields are shared between all members of an overlay tree. + """Set the meta field for a key to a new value. + + Meta-fields are shared between all members of an overlay tree. """ self._meta.setdefault(key, {})[metafield] = value def meta_list(self, key): - """ Return an iterable of meta field names defined for a key. 
""" + """Return an iterable of meta field names defined for a key.""" return self._meta.get(key, {}).keys() def _define(self, key, default=_UNSET, help=_UNSET, validate=_UNSET): - """ (Unstable) Shortcut for plugins to define own config parameters. """ + """(Unstable) Shortcut for plugins to define own config parameters.""" if default is not _UNSET: self.setdefault(key, default) if help is not _UNSET: @@ -2560,28 +2664,28 @@ def _iter_overlays(self): yield overlay def _make_overlay(self): - """ (Unstable) Create a new overlay that acts like a chained map: Values - missing in the overlay are copied from the source map. Both maps - share the same meta entries. - - Entries that were copied from the source are called 'virtual'. You - can not delete virtual keys, but overwrite them, which turns them - into non-virtual entries. Setting keys on an overlay never affects - its source, but may affect any number of child overlays. - - Other than collections.ChainMap or most other implementations, this - approach does not resolve missing keys on demand, but instead - actively copies all values from the source to the overlay and keeps - track of virtual and non-virtual keys internally. This removes any - lookup-overhead. Read-access is as fast as a build-in dict for both - virtual and non-virtual keys. - - Changes are propagated recursively and depth-first. A failing - on-change handler in an overlay stops the propagation of virtual - values and may result in an partly updated tree. Take extra care - here and make sure that on-change handlers never fail. - - Used by Route.config + """(Unstable) Create a new overlay that acts like a chained map: Values + missing in the overlay are copied from the source map. Both maps + share the same meta entries. + + Entries that were copied from the source are called 'virtual'. You + can not delete virtual keys, but overwrite them, which turns them + into non-virtual entries. 
Setting keys on an overlay never affects + its source, but may affect any number of child overlays. + + Other than collections.ChainMap or most other implementations, this + approach does not resolve missing keys on demand, but instead + actively copies all values from the source to the overlay and keeps + track of virtual and non-virtual keys internally. This removes any + lookup-overhead. Read-access is as fast as a build-in dict for both + virtual and non-virtual keys. + + Changes are propagated recursively and depth-first. A failing + on-change handler in an overlay stops the propagation of virtual + values and may result in an partly updated tree. Take extra care + here and make sure that on-change handlers never fail. + + Used by Route.config """ # Cleanup dead references self._overlays[:] = [ref for ref in self._overlays if ref() is not None] @@ -2595,21 +2699,20 @@ def _make_overlay(self): return overlay - - class AppStack(list): - """ A stack-like list. Calling it returns the head of the stack. """ + """A stack-like list. Calling it returns the head of the stack.""" def __call__(self): - """ Return the current default application. 
""" + """Return the current default application.""" return self.default def push(self, value=None): - """ Add a new :class:`Bottle` instance to the stack """ + """Add a new :class:`Bottle` instance to the stack""" if not isinstance(value, Bottle): value = Bottle() self.append(value) return value + new_app = push @property @@ -2624,7 +2727,8 @@ class WSGIFileWrapper(object): def __init__(self, fp, buffer_size=1024 * 64): self.fp, self.buffer_size = fp, buffer_size for attr in 'fileno', 'close', 'read', 'readlines', 'tell', 'seek': - if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr)) + if hasattr(fp, attr): + setattr(self, attr, getattr(fp, attr)) def __iter__(self): buff, read = self.buffer_size, self.read @@ -2635,8 +2739,8 @@ def __iter__(self): class _closeiter(object): - """ This only exists to be able to attach a .close method to iterators that - do not support attribute assignment (most of itertools). """ + """This only exists to be able to attach a .close method to iterators that + do not support attribute assignment (most of itertools).""" def __init__(self, iterator, close=None): self.iterator = iterator @@ -2651,13 +2755,13 @@ def close(self): class ResourceManager(object): - """ This class manages a list of search paths and helps to find and open - application-bound resources (files). + """This class manages a list of search paths and helps to find and open + application-bound resources (files). - :param base: default value for :meth:`add_path` calls. - :param opener: callable used to open resources. - :param cachemode: controls which lookups are cached. One of 'all', - 'found' or 'none'. + :param base: default value for :meth:`add_path` calls. + :param opener: callable used to open resources. + :param cachemode: controls which lookups are cached. One of 'all', + 'found' or 'none'. 
""" def __init__(self, base='./', opener=open, cachemode='all'): @@ -2671,21 +2775,21 @@ def __init__(self, base='./', opener=open, cachemode='all'): self.cache = {} def add_path(self, path, base=None, index=None, create=False): - """ Add a new path to the list of search paths. Return False if the - path does not exist. + """Add a new path to the list of search paths. Return False if the + path does not exist. - :param path: The new search path. Relative paths are turned into - an absolute and normalized form. If the path looks like a file - (not ending in `/`), the filename is stripped off. - :param base: Path used to absolutize relative search paths. - Defaults to :attr:`base` which defaults to ``os.getcwd()``. - :param index: Position within the list of search paths. Defaults - to last index (appends to the list). + :param path: The new search path. Relative paths are turned into + an absolute and normalized form. If the path looks like a file + (not ending in `/`), the filename is stripped off. + :param base: Path used to absolutize relative search paths. + Defaults to :attr:`base` which defaults to ``os.getcwd()``. + :param index: Position within the list of search paths. Defaults + to last index (appends to the list). - The `base` parameter makes it easy to reference files installed - along with a python module or package:: + The `base` parameter makes it easy to reference files installed + along with a python module or package:: - res.add_path('./resources/', __file__) + res.add_path('./resources/', __file__) """ base = os.path.abspath(os.path.dirname(base or self.base)) path = os.path.abspath(os.path.join(base, os.path.dirname(path))) @@ -2702,22 +2806,25 @@ def add_path(self, path, base=None, index=None, create=False): return os.path.exists(path) def __iter__(self): - """ Iterate over all existing files in all registered paths. 
""" + """Iterate over all existing files in all registered paths.""" search = self.path[:] while search: path = search.pop() - if not os.path.isdir(path): continue + if not os.path.isdir(path): + continue for name in os.listdir(path): full = os.path.join(path, name) - if os.path.isdir(full): search.append(full) - else: yield full + if os.path.isdir(full): + search.append(full) + else: + yield full def lookup(self, name): - """ Search for a resource and return an absolute file path, or `None`. + """Search for a resource and return an absolute file path, or `None`. - The :attr:`path` list is searched in order. The first match is - returned. Symlinks are followed. The result is cached to speed up - future lookups. """ + The :attr:`path` list is searched in order. The first match is + returned. Symlinks are followed. The result is cached to speed up + future lookups.""" if name not in self.cache or DEBUG: for path in self.path: fpath = os.path.join(path, name) @@ -2730,15 +2837,16 @@ def lookup(self, name): return self.cache[name] def open(self, name, mode='r', *args, **kwargs): - """ Find a resource and return a file object, or raise IOError. """ + """Find a resource and return a file object, or raise IOError.""" fname = self.lookup(name) - if not fname: raise IOError("Resource %r not found." % name) + if not fname: + raise IOError('Resource %r not found.' % name) return self.opener(fname, mode=mode, *args, **kwargs) class FileUpload(object): def __init__(self, fileobj, name, filename, headers=None): - """ Wrapper for a single file uploaded via ``multipart/form-data``. 
""" + """Wrapper for a single file uploaded via ``multipart/form-data``.""" #: Open file(-like) object (BytesIO buffer or temporary file) self.file = fileobj #: Name of the upload form field @@ -2752,18 +2860,18 @@ def __init__(self, fileobj, name, filename, headers=None): content_length = HeaderProperty('Content-Length', reader=int, default=-1) def get_header(self, name, default=None): - """ Return the value of a header within the multipart part. """ + """Return the value of a header within the multipart part.""" return self.headers.get(name, default) @cached_property def filename(self): - """ Name of the file on the client file system, but normalized to ensure - file system compatibility. An empty filename is returned as 'empty'. + """Name of the file on the client file system, but normalized to ensure + file system compatibility. An empty filename is returned as 'empty'. - Only ASCII letters, digits, dashes, underscores and dots are - allowed in the final filename. Accents are removed, if possible. - Whitespace is replaced by a single dash. Leading or tailing dots - or dashes are removed. The filename is limited to 255 characters. + Only ASCII letters, digits, dashes, underscores and dots are + allowed in the final filename. Accents are removed, if possible. + Whitespace is replaced by a single dash. Leading or tailing dots + or dashes are removed. The filename is limited to 255 characters. 
""" fname = self.raw_filename if not isinstance(fname, unicode): @@ -2775,22 +2883,23 @@ def filename(self): fname = re.sub(r'[-\s]+', '-', fname).strip('.-') return fname[:255] or 'empty' - def _copy_file(self, fp, chunk_size=2 ** 16): + def _copy_file(self, fp, chunk_size=2**16): read, write, offset = self.file.read, fp.write, self.file.tell() while 1: buf = read(chunk_size) - if not buf: break + if not buf: + break write(buf) self.file.seek(offset) - def save(self, destination, overwrite=False, chunk_size=2 ** 16): - """ Save file to disk or copy its content to an open file(-like) object. - If *destination* is a directory, :attr:`filename` is added to the - path. Existing files are not overwritten by default (IOError). + def save(self, destination, overwrite=False, chunk_size=2**16): + """Save file to disk or copy its content to an open file(-like) object. + If *destination* is a directory, :attr:`filename` is added to the + path. Existing files are not overwritten by default (IOError). - :param destination: File path, directory or file(-like) object. - :param overwrite: If True, replace existing files. (default: False) - :param chunk_size: Bytes to read at a time. (default: 64kb) + :param destination: File path, directory or file(-like) object. + :param overwrite: If True, replace existing files. (default: False) + :param chunk_size: Bytes to read at a time. (default: 64kb) """ if isinstance(destination, basestring): # Except file-likes here if os.path.isdir(destination): @@ -2802,30 +2911,31 @@ def save(self, destination, overwrite=False, chunk_size=2 ** 16): else: self._copy_file(destination, chunk_size) + ############################################################################### # Application Helper ########################################################### ############################################################################### def abort(code=500, text='Unknown Error.'): - """ Aborts execution and causes a HTTP error. 
""" + """Aborts execution and causes a HTTP error.""" raise HTTPError(code, text) def redirect(url, code=None): - """ Aborts execution and causes a 303 or 302 redirect, depending on - the HTTP protocol version. """ + """Aborts execution and causes a 303 or 302 redirect, depending on + the HTTP protocol version.""" if not code: - code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302 + code = 303 if request.get('SERVER_PROTOCOL') == 'HTTP/1.1' else 302 res = response.copy(cls=HTTPResponse) res.status = code - res.body = "" + res.body = '' res.set_header('Location', urljoin(request.url, url)) raise res def _rangeiter(fp, offset, limit, bufsize=1024 * 1024): - """ Yield chunks from a range in a file. """ + """Yield chunks from a range in a file.""" fp.seek(offset) while limit > 0: part = fp.read(min(limit, bufsize)) @@ -2835,41 +2945,36 @@ def _rangeiter(fp, offset, limit, bufsize=1024 * 1024): yield part -def static_file(filename, root, - mimetype=True, - download=False, - charset='UTF-8', - etag=None, - headers=None): - """ Open a file in a safe way and return an instance of :exc:`HTTPResponse` - that can be sent back to the client. - - :param filename: Name or path of the file to send, relative to ``root``. - :param root: Root path for file lookups. Should be an absolute directory - path. - :param mimetype: Provide the content-type header (default: guess from - file extension) - :param download: If True, ask the browser to open a `Save as...` dialog - instead of opening the file with the associated program. You can - specify a custom filename as a string. If not specified, the - original filename is used (default: False). - :param charset: The charset for files with a ``text/*`` mime-type. - (default: UTF-8) - :param etag: Provide a pre-computed ETag header. If set to ``False``, - ETag handling is disabled. (default: auto-generate ETag header) - :param headers: Additional headers dict to add to the response. 
- - While checking user input is always a good idea, this function provides - additional protection against malicious ``filename`` parameters from - breaking out of the ``root`` directory and leaking sensitive information - to an attacker. - - Read-protected files or files outside of the ``root`` directory are - answered with ``403 Access Denied``. Missing files result in a - ``404 Not Found`` response. Conditional requests (``If-Modified-Since``, - ``If-None-Match``) are answered with ``304 Not Modified`` whenever - possible. ``HEAD`` and ``Range`` requests (used by download managers to - check or continue partial downloads) are also handled automatically. +def static_file(filename, root, mimetype=True, download=False, charset='UTF-8', etag=None, headers=None): + """Open a file in a safe way and return an instance of :exc:`HTTPResponse` + that can be sent back to the client. + + :param filename: Name or path of the file to send, relative to ``root``. + :param root: Root path for file lookups. Should be an absolute directory + path. + :param mimetype: Provide the content-type header (default: guess from + file extension) + :param download: If True, ask the browser to open a `Save as...` dialog + instead of opening the file with the associated program. You can + specify a custom filename as a string. If not specified, the + original filename is used (default: False). + :param charset: The charset for files with a ``text/*`` mime-type. + (default: UTF-8) + :param etag: Provide a pre-computed ETag header. If set to ``False``, + ETag handling is disabled. (default: auto-generate ETag header) + :param headers: Additional headers dict to add to the response. + + While checking user input is always a good idea, this function provides + additional protection against malicious ``filename`` parameters from + breaking out of the ``root`` directory and leaking sensitive information + to an attacker. 
+ + Read-protected files or files outside of the ``root`` directory are + answered with ``403 Access Denied``. Missing files result in a + ``404 Not Found`` response. Conditional requests (``If-Modified-Since``, + ``If-None-Match``) are answered with ``304 Not Modified`` whenever + possible. ``HEAD`` and ``Range`` requests (used by download managers to + check or continue partial downloads) are also handled automatically. """ root = os.path.join(os.path.abspath(root), '') @@ -2878,22 +2983,26 @@ def static_file(filename, root, getenv = request.environ.get if not filename.startswith(root): - return HTTPError(403, "Access denied.") + return HTTPError(403, 'Access denied.') if not os.path.exists(filename) or not os.path.isfile(filename): - return HTTPError(404, "File does not exist.") + return HTTPError(404, 'File does not exist.') if not os.access(filename, os.R_OK): - return HTTPError(403, "You do not have permission to access this file.") + return HTTPError(403, 'You do not have permission to access this file.') if mimetype is True: name = download if isinstance(download, str) else filename mimetype, encoding = mimetypes.guess_type(name) if encoding == 'gzip': mimetype = 'application/gzip' - elif encoding: # e.g. bzip2 -> application/x-bzip2 + elif encoding: # e.g. 
bzip2 -> application/x-bzip2 mimetype = 'application/x-' + encoding - if charset and mimetype and 'charset=' not in mimetype \ - and (mimetype[:5] == 'text/' or mimetype == 'application/javascript'): + if ( + charset + and mimetype + and 'charset=' not in mimetype + and (mimetype[:5] == 'text/' or mimetype == 'application/javascript') + ): mimetype += '; charset=%s' % charset if mimetype: @@ -2903,7 +3012,7 @@ def static_file(filename, root, download = os.path.basename(filename) if download: - download = download.replace('"','') + download = download.replace('"', '') headers['Content-Disposition'] = 'attachment; filename="%s"' % download stats = os.stat(filename) @@ -2912,8 +3021,7 @@ def static_file(filename, root, headers['Date'] = email.utils.formatdate(time.time(), usegmt=True) if etag is None: - etag = '%d:%d:%d:%d:%s' % (stats.st_dev, stats.st_ino, stats.st_mtime, - clen, filename) + etag = '%d:%d:%d:%d:%s' % (stats.st_dev, stats.st_ino, stats.st_mtime, clen, filename) etag = hashlib.sha1(tob(etag)).hexdigest() if etag: @@ -2924,36 +3032,39 @@ def static_file(filename, root, ims = getenv('HTTP_IF_MODIFIED_SINCE') if ims: - ims = parse_date(ims.split(";")[0].strip()) + ims = parse_date(ims.split(';')[0].strip()) if ims is not None and ims >= int(stats.st_mtime): return HTTPResponse(status=304, **headers) body = '' if request.method == 'HEAD' else open(filename, 'rb') - headers["Accept-Ranges"] = "bytes" + headers['Accept-Ranges'] = 'bytes' range_header = getenv('HTTP_RANGE') if range_header: ranges = list(parse_range_header(range_header, clen)) if not ranges: - return HTTPError(416, "Requested Range Not Satisfiable") + return HTTPError(416, 'Requested Range Not Satisfiable') offset, end = ranges[0] rlen = end - offset - headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end - 1, clen) - headers["Content-Length"] = str(rlen) - if body: body = _closeiter(_rangeiter(body, offset, rlen), body.close) + headers['Content-Range'] = 'bytes %d-%d/%d' % (offset, end 
- 1, clen) + headers['Content-Length'] = str(rlen) + if body: + body = _closeiter(_rangeiter(body, offset, rlen), body.close) return HTTPResponse(body, status=206, **headers) return HTTPResponse(body, **headers) + ############################################################################### # HTTP Utilities and MISC (TODO) ############################################### ############################################################################### def debug(mode=True): - """ Change the debug level. + """Change the debug level. There is only one debug level supported at the moment.""" global DEBUG - if mode: warnings.simplefilter('default') + if mode: + warnings.simplefilter('default') DEBUG = bool(mode) @@ -2974,16 +3085,16 @@ def http_date(value): def parse_date(ims): - """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """ + """Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch.""" try: ts = email.utils.parsedate_tz(ims) - return calendar.timegm(ts[:8] + (0, )) - (ts[9] or 0) + return calendar.timegm(ts[:8] + (0,)) - (ts[9] or 0) except (TypeError, ValueError, IndexError, OverflowError): return None def parse_auth(header): - """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" + """Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" try: method, data = header.split(None, 1) if method.lower() == 'basic': @@ -2994,9 +3105,10 @@ def parse_auth(header): def parse_range_header(header, maxlen=0): - """ Yield (start, end) ranges parsed from a HTTP Range header. Skip - unsatisfiable ranges. The end index is non-inclusive.""" - if not header or header[:6] != 'bytes=': return + """Yield (start, end) ranges parsed from a HTTP Range header. Skip + unsatisfiable ranges. 
The end index is non-inclusive.""" + if not header or header[:6] != 'bytes=': + return ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r] for start, end in ranges: try: @@ -3015,8 +3127,9 @@ def parse_range_header(header, maxlen=0): #: Header tokenizer used by _parse_http_header() _hsplit = re.compile('(?:(?:"((?:[^"\\\\]|\\\\.)*)")|([^;,=]+))([;,=]?)').findall + def _parse_http_header(h): - """ Parses a typical multi-valued and parametrised HTTP header (e.g. Accept headers) and returns a list of values + """Parses a typical multi-valued and parametrised HTTP header (e.g. Accept headers) and returns a list of values and parameters. For non-standard or broken input, this implementation may return partial results. :param h: A header string (e.g. ``text/html,text/plain;q=0.9,*/*;q=0.8``) :return: List of (value, params) tuples. The second element is a (possibly empty) dict. @@ -3051,9 +3164,11 @@ def _parse_http_header(h): def _parse_qsl(qs): r = [] for pair in qs.split('&'): - if not pair: continue + if not pair: + continue nv = pair.split('=', 1) - if len(nv) != 2: nv.append('') + if len(nv) != 2: + nv.append('') key = urlunquote(nv[0].replace('+', ' ')) value = urlunquote(nv[1].replace('+', ' ')) r.append((key, value)) @@ -3061,16 +3176,14 @@ def _parse_qsl(qs): def _lscmp(a, b): - """ Compares two strings in a cryptographically safe way: - Runtime is not affected by length of common prefix. """ - return not sum(0 if x == y else 1 - for x, y in zip(a, b)) and len(a) == len(b) + """Compares two strings in a cryptographically safe way: + Runtime is not affected by length of common prefix.""" + return not sum(0 if x == y else 1 for x, y in zip(a, b)) and len(a) == len(b) def cookie_encode(data, key, digestmod=None): - """ Encode and sign a pickle-able object. Return a (byte) string """ - depr(0, 13, "cookie_encode() will be removed soon.", - "Do not use this API directly.") + """Encode and sign a pickle-able object. 
Return a (byte) string""" + depr(0, 13, 'cookie_encode() will be removed soon.', 'Do not use this API directly.') digestmod = digestmod or hashlib.sha256 msg = base64.b64encode(pickle.dumps(data, -1)) sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=digestmod).digest()) @@ -3078,9 +3191,8 @@ def cookie_encode(data, key, digestmod=None): def cookie_decode(data, key, digestmod=None): - """ Verify and decode an encoded string. Return an object or None.""" - depr(0, 13, "cookie_decode() will be removed soon.", - "Do not use this API directly.") + """Verify and decode an encoded string. Return an object or None.""" + depr(0, 13, 'cookie_decode() will be removed soon.', 'Do not use this API directly.') data = tob(data) if cookie_is_encoded(data): sig, msg = data.split(tob('?'), 1) @@ -3092,26 +3204,29 @@ def cookie_decode(data, key, digestmod=None): def cookie_is_encoded(data): - """ Return True if the argument looks like a encoded cookie.""" - depr(0, 13, "cookie_is_encoded() will be removed soon.", - "Do not use this API directly.") + """Return True if the argument looks like a encoded cookie.""" + depr(0, 13, 'cookie_is_encoded() will be removed soon.', 'Do not use this API directly.') return bool(data.startswith(tob('!')) and tob('?') in data) def html_escape(string): - """ Escape HTML special characters ``&<>`` and quotes ``'"``. 
""" - return string.replace('&', '&').replace('<', '<').replace('>', '>')\ - .replace('"', '"').replace("'", ''') + """Escape HTML special characters ``&<>`` and quotes ``'"``.""" + return ( + string.replace('&', '&') + .replace('<', '<') + .replace('>', '>') + .replace('"', '"') + .replace("'", ''') + ) def html_quote(string): - """ Escape and quote a string to be used as an HTTP attribute.""" - return '"%s"' % html_escape(string).replace('\n', ' ')\ - .replace('\r', ' ').replace('\t', ' ') + """Escape and quote a string to be used as an HTTP attribute.""" + return '"%s"' % html_escape(string).replace('\n', ' ').replace('\r', ' ').replace('\t', ' ') def yieldroutes(func): - """ Return a generator for routes that match the signature (name, args) + """Return a generator for routes that match the signature (name, args) of the func parameter. This may yield more than one route if the function takes optional keyword arguments. The output is best described by example:: @@ -3131,19 +3246,22 @@ def yieldroutes(func): def path_shift(script_name, path_info, shift=1): - """ Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. + """Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. - :return: The modified paths. - :param script_name: The SCRIPT_NAME path. - :param script_name: The PATH_INFO path. - :param shift: The number of path fragments to shift. May be negative to - change the shift direction. (default: 1) + :return: The modified paths. + :param script_name: The SCRIPT_NAME path. + :param script_name: The PATH_INFO path. + :param shift: The number of path fragments to shift. May be negative to + change the shift direction. 
(default: 1) """ - if shift == 0: return script_name, path_info + if shift == 0: + return script_name, path_info pathlist = path_info.strip('/').split('/') scriptlist = script_name.strip('/').split('/') - if pathlist and pathlist[0] == '': pathlist = [] - if scriptlist and scriptlist[0] == '': scriptlist = [] + if pathlist and pathlist[0] == '': + pathlist = [] + if scriptlist and scriptlist[0] == '': + scriptlist = [] if 0 < shift <= len(pathlist): moved = pathlist[:shift] scriptlist = scriptlist + moved @@ -3154,19 +3272,19 @@ def path_shift(script_name, path_info, shift=1): scriptlist = scriptlist[:shift] else: empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO' - raise AssertionError("Cannot shift. Nothing left from %s" % empty) + raise AssertionError('Cannot shift. Nothing left from %s' % empty) new_script_name = '/' + '/'.join(scriptlist) new_path_info = '/' + '/'.join(pathlist) - if path_info.endswith('/') and pathlist: new_path_info += '/' + if path_info.endswith('/') and pathlist: + new_path_info += '/' return new_script_name, new_path_info -def auth_basic(check, realm="private", text="Access denied"): - """ Callback decorator to require HTTP auth (basic). - TODO: Add route(check_auth=...) parameter. """ +def auth_basic(check, realm='private', text='Access denied'): + """Callback decorator to require HTTP auth (basic). + TODO: Add route(check_auth=...) parameter.""" def decorator(func): - @functools.wraps(func) def wrapper(*a, **ka): user, password = request.auth or (None, None) @@ -3180,12 +3298,13 @@ def wrapper(*a, **ka): return decorator + # Shortcuts for common Bottle methods. # They all refer to the current default application. def make_default_app_wrapper(name): - """ Return a callable that relays calls to the current default app. 
""" + """Return a callable that relays calls to the current default app.""" @functools.wraps(getattr(Bottle, name)) def wrapper(*a, **ka): @@ -3194,18 +3313,18 @@ def wrapper(*a, **ka): return wrapper -route = make_default_app_wrapper('route') -get = make_default_app_wrapper('get') -post = make_default_app_wrapper('post') -put = make_default_app_wrapper('put') -delete = make_default_app_wrapper('delete') -patch = make_default_app_wrapper('patch') -error = make_default_app_wrapper('error') -mount = make_default_app_wrapper('mount') -hook = make_default_app_wrapper('hook') -install = make_default_app_wrapper('install') +route = make_default_app_wrapper('route') +get = make_default_app_wrapper('get') +post = make_default_app_wrapper('post') +put = make_default_app_wrapper('put') +delete = make_default_app_wrapper('delete') +patch = make_default_app_wrapper('patch') +error = make_default_app_wrapper('error') +mount = make_default_app_wrapper('mount') +hook = make_default_app_wrapper('hook') +install = make_default_app_wrapper('install') uninstall = make_default_app_wrapper('uninstall') -url = make_default_app_wrapper('get_url') +url = make_default_app_wrapper('get_url') ############################################################################### @@ -3217,7 +3336,7 @@ def wrapper(*a, **ka): class MultipartError(HTTPError): def __init__(self, msg): - HTTPError.__init__(self, 400, "MultipartError: " + msg) + HTTPError.__init__(self, 400, 'MultipartError: ' + msg) class _MultipartParser(object): @@ -3226,11 +3345,11 @@ def __init__( stream, boundary, content_length=-1, - disk_limit=2 ** 30, - mem_limit=2 ** 20, - memfile_limit=2 ** 18, - buffer_size=2 ** 16, - charset="latin1", + disk_limit=2**30, + mem_limit=2**20, + memfile_limit=2**18, + buffer_size=2**16, + charset='latin1', ): self.stream = stream self.boundary = boundary @@ -3242,22 +3361,22 @@ def __init__( self.charset = charset if not boundary: - raise MultipartError("No boundary.") + raise MultipartError('No 
boundary.') if self.buffer_size - 6 < len(boundary): # "--boundary--\r\n" - raise MultipartError("Boundary does not fit into buffer_size.") + raise MultipartError('Boundary does not fit into buffer_size.') def _lineiter(self): - """ Iterate over a binary file-like object (crlf terminated) line by - line. Each line is returned as a (line, crlf) tuple. Lines larger - than buffer_size are split into chunks where all but the last chunk - has an empty string instead of crlf. Maximum chunk size is twice the - buffer size. + """Iterate over a binary file-like object (crlf terminated) line by + line. Each line is returned as a (line, crlf) tuple. Lines larger + than buffer_size are split into chunks where all but the last chunk + has an empty string instead of crlf. Maximum chunk size is twice the + buffer size. """ read = self.stream.read maxread, maxbuf = self.content_length, self.buffer_size - partial = b"" # Contains the last (partial) line + partial = b'' # Contains the last (partial) line while True: chunk = read(maxbuf if maxread < 0 else min(maxbuf, maxread)) @@ -3276,20 +3395,20 @@ def _lineiter(self): if i >= 0: yield chunk[scanpos:i], b'\r\n' scanpos = i + 2 - else: # CRLF not found + else: # CRLF not found partial = chunk[scanpos:] if scanpos else chunk break if len(partial) > maxbuf: - yield partial[:-1], b"" + yield partial[:-1], b'' partial = partial[-1:] def parse(self): - """ Return a MultiPart iterator. Can only be called once. """ + """Return a MultiPart iterator. 
Can only be called once.""" - lines, line = self._lineiter(), "" - separator = b"--" + tob(self.boundary) - terminator = separator + b"--" + lines, line = self._lineiter(), '' + separator = b'--' + tob(self.boundary) + terminator = separator + b'--' mem_used, disk_used = 0, 0 # Track used resources to prevent DoS is_tail = False # True if the last line was incomplete (cutted) @@ -3299,18 +3418,18 @@ def parse(self): if line in (separator, terminator): break else: - raise MultipartError("Stream does not contain boundary") + raise MultipartError('Stream does not contain boundary') # First line is termainating boundary -> empty multipart stream if line == terminator: for _ in lines: - raise MultipartError("Found data after empty multipart stream") + raise MultipartError('Found data after empty multipart stream') return part_options = { - "buffer_size": self.buffer_size, - "memfile_limit": self.memfile_limit, - "charset": self.charset, + 'buffer_size': self.buffer_size, + 'memfile_limit': self.memfile_limit, + 'charset': self.charset, } part = _MultipartPart(**part_options) @@ -3331,9 +3450,9 @@ def parse(self): part.feed(line, nl) if part.is_buffered(): if part.size + mem_used > self.mem_limit: - raise MultipartError("Memory limit reached.") + raise MultipartError('Memory limit reached.') elif part.size + disk_used > self.disk_limit: - raise MultipartError("Disk limit reached.") + raise MultipartError('Disk limit reached.') except MultipartError: part.close() raise @@ -3341,16 +3460,16 @@ def parse(self): part.close() if line != terminator: - raise MultipartError("Unexpected end of multipart stream.") + raise MultipartError('Unexpected end of multipart stream.') class _MultipartPart(object): - def __init__(self, buffer_size=2 ** 16, memfile_limit=2 ** 18, charset="latin1"): + def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'): self.headerlist = [] self.headers = None self.file = False self.size = 0 - self._buf = b"" + self._buf = b'' 
self.disposition = None self.name = None self.filename = None @@ -3359,7 +3478,7 @@ def __init__(self, buffer_size=2 ** 16, memfile_limit=2 ** 18, charset="latin1") self.memfile_limit = memfile_limit self.buffer_size = buffer_size - def feed(self, line, nl=""): + def feed(self, line, nl=''): if self.file: return self.write_body(line, nl) return self.write_header(line, nl) @@ -3368,18 +3487,18 @@ def write_header(self, line, nl): line = line.decode(self.charset) if not nl: - raise MultipartError("Unexpected end of line in header.") + raise MultipartError('Unexpected end of line in header.') if not line.strip(): # blank line -> end of header segment self.finish_header() - elif line[0] in " \t" and self.headerlist: + elif line[0] in ' \t' and self.headerlist: name, value = self.headerlist.pop() self.headerlist.append((name, value + line.strip())) else: - if ":" not in line: - raise MultipartError("Syntax error in header: No colon.") + if ':' not in line: + raise MultipartError('Syntax error in header: No colon.') - name, value = line.split(":", 1) + name, value = line.split(':', 1) self.headerlist.append((name.strip(), value.strip())) def write_body(self, line, nl): @@ -3391,10 +3510,10 @@ def write_body(self, line, nl): self._buf = nl if self.content_length > 0 and self.size > self.content_length: - raise MultipartError("Size of body exceeds Content-Length header.") + raise MultipartError('Size of body exceeds Content-Length header.') if self.size > self.memfile_limit and isinstance(self.file, BytesIO): - self.file, old = NamedTemporaryFile(mode="w+b"), self.file + self.file, old = NamedTemporaryFile(mode='w+b'), self.file old.seek(0) copied, maxcopy, chunksize = 0, self.size, self.buffer_size @@ -3407,42 +3526,42 @@ def write_body(self, line, nl): def finish_header(self): self.file = BytesIO() self.headers = HeaderDict(self.headerlist) - content_disposition = self.headers.get("Content-Disposition") - content_type = self.headers.get("Content-Type") + 
content_disposition = self.headers.get('Content-Disposition') + content_type = self.headers.get('Content-Type') if not content_disposition: - raise MultipartError("Content-Disposition header is missing.") + raise MultipartError('Content-Disposition header is missing.') self.disposition, self.options = _parse_http_header(content_disposition)[0] - self.name = self.options.get("name") - if "filename" in self.options: - self.filename = self.options.get("filename") - if self.filename[1:3] == ":\\" or self.filename[:2] == "\\\\": - self.filename = self.filename.split("\\")[-1] # ie6 bug + self.name = self.options.get('name') + if 'filename' in self.options: + self.filename = self.options.get('filename') + if self.filename[1:3] == ':\\' or self.filename[:2] == '\\\\': + self.filename = self.filename.split('\\')[-1] # ie6 bug self.content_type, options = _parse_http_header(content_type)[0] if content_type else (None, {}) - self.charset = options.get("charset") or self.charset + self.charset = options.get('charset') or self.charset - self.content_length = int(self.headers.get("Content-Length", "-1")) + self.content_length = int(self.headers.get('Content-Length', '-1')) def finish(self): if not self.file: - raise MultipartError("Incomplete part: Header section not closed.") + raise MultipartError('Incomplete part: Header section not closed.') self.file.seek(0) def is_buffered(self): - """ Return true if the data is fully buffered in memory.""" + """Return true if the data is fully buffered in memory.""" return isinstance(self.file, BytesIO) @property def value(self): - """ Data decoded with the specified charset """ + """Data decoded with the specified charset""" return self.raw.decode(self.charset) @property def raw(self): - """ Data without decoding """ + """Data without decoding""" pos = self.file.tell() self.file.seek(0) @@ -3456,6 +3575,7 @@ def close(self): self.file.close() self.file = False + 
############################################################################### # Server Adapter ############################################################### ############################################################################### @@ -3464,6 +3584,7 @@ def close(self): # - https://github.com/bottlepy/bottle/pull/647#issuecomment-60152870 # - https://github.com/bottlepy/bottle/pull/865#issuecomment-242795341 + class ServerAdapter(object): quiet = False @@ -3476,9 +3597,8 @@ def run(self, handler): # pragma: no cover pass def __repr__(self): - args = ', '.join('%s=%s' % (k, repr(v)) - for k, v in self.options.items()) - return "%s(%s)" % (self.__class__.__name__, args) + args = ', '.join('%s=%s' % (k, repr(v)) for k, v in self.options.items()) + return '%s(%s)' % (self.__class__.__name__, args) class CGIServer(ServerAdapter): @@ -3497,15 +3617,15 @@ def fixed_environ(environ, start_response): class FlupFCGIServer(ServerAdapter): def run(self, handler): # pragma: no cover import flup.server.fcgi + self.options.setdefault('bindAddress', (self.host, self.port)) flup.server.fcgi.WSGIServer(handler, **self.options).run() class WSGIRefServer(ServerAdapter): def run(self, app): # pragma: no cover - from wsgiref.simple_server import make_server - from wsgiref.simple_server import WSGIRequestHandler, WSGIServer import socket + from wsgiref.simple_server import WSGIRequestHandler, WSGIServer, make_server class FixedHandler(WSGIRequestHandler): def address_string(self): # Prevent reverse DNS lookups please. 
@@ -3524,8 +3644,7 @@ def log_request(*args, **kw): class server_cls(server_cls): address_family = socket.AF_INET6 - self.srv = make_server(self.host, self.port, app, server_cls, - handler_cls) + self.srv = make_server(self.host, self.port, app, server_cls, handler_cls) self.port = self.srv.server_port # update port actual port (0 means random) try: self.srv.serve_forever() @@ -3536,10 +3655,13 @@ class server_cls(server_cls): class CherryPyServer(ServerAdapter): def run(self, handler): # pragma: no cover - depr(0, 13, "The wsgi server part of cherrypy was split into a new " - "project called 'cheroot'.", "Use the 'cheroot' server " - "adapter instead of cherrypy.") - from cherrypy import wsgiserver # This will fail for CherryPy >= 9 + depr( + 0, + 13, + "The wsgi server part of cherrypy was split into a new project called 'cheroot'.", + "Use the 'cheroot' server adapter instead of cherrypy.", + ) + from cherrypy import wsgiserver # This will fail for CherryPy >= 9 self.options['bind_addr'] = (self.host, self.port) self.options['wsgi_app'] = handler @@ -3564,9 +3686,10 @@ def run(self, handler): # pragma: no cover class CherootServer(ServerAdapter): - def run(self, handler): # pragma: no cover + def run(self, handler): # pragma: no cover from cheroot import wsgi from cheroot.ssl import builtin + self.options['bind_addr'] = (self.host, self.port) self.options['wsgi_app'] = handler certfile = self.options.pop('certfile', None) @@ -3574,8 +3697,7 @@ def run(self, handler): # pragma: no cover chainfile = self.options.pop('chainfile', None) server = wsgi.Server(**self.options) if certfile and keyfile: - server.ssl_adapter = builtin.BuiltinSSLAdapter( - certfile, keyfile, chainfile) + server.ssl_adapter = builtin.BuiltinSSLAdapter(certfile, keyfile, chainfile) try: server.start() finally: @@ -3585,6 +3707,7 @@ def run(self, handler): # pragma: no cover class WaitressServer(ServerAdapter): def run(self, handler): from waitress import serve + serve(handler, host=self.host, 
port=self.port, _quiet=self.quiet, **self.options) @@ -3592,26 +3715,27 @@ class PasteServer(ServerAdapter): def run(self, handler): # pragma: no cover from paste import httpserver from paste.translogger import TransLogger + handler = TransLogger(handler, setup_console_handler=(not self.quiet)) - httpserver.serve(handler, - host=self.host, - port=str(self.port), **self.options) + httpserver.serve(handler, host=self.host, port=str(self.port), **self.options) class MeinheldServer(ServerAdapter): def run(self, handler): from meinheld import server + server.listen((self.host, self.port)) server.run(handler) class FapwsServer(ServerAdapter): - """ Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3 """ + """Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3""" def run(self, handler): # pragma: no cover - depr(0, 13, "fapws3 is not maintained and support will be dropped.") + depr(0, 13, 'fapws3 is not maintained and support will be dropped.') import fapws._evwsgi as evwsgi from fapws import base, config + port = self.port if float(config.SERVER_IDENT[-2:]) > 0.4: # fapws3 silently changed its API in 0.5 @@ -3619,8 +3743,8 @@ def run(self, handler): # pragma: no cover evwsgi.start(self.host, port) # fapws3 never releases the GIL. Complain upstream. I tried. No luck. if 'BOTTLE_CHILD' in os.environ and not self.quiet: - _stderr("WARNING: Auto-reloading does not work with Fapws3.") - _stderr(" (Fapws3 breaks python thread support)") + _stderr('WARNING: Auto-reloading does not work with Fapws3.') + _stderr(' (Fapws3 breaks python thread support)') evwsgi.set_base_module(base) def app(environ, start_response): @@ -3632,10 +3756,13 @@ def app(environ, start_response): class TornadoServer(ServerAdapter): - """ The super hyped asynchronous server by facebook. Untested. """ + """The super hyped asynchronous server by facebook. 
Untested.""" def run(self, handler): # pragma: no cover - import tornado.wsgi, tornado.httpserver, tornado.ioloop + import tornado.httpserver + import tornado.ioloop + import tornado.wsgi + container = tornado.wsgi.WSGIContainer(handler) server = tornado.httpserver.HTTPServer(container) server.listen(port=self.port, address=self.host) @@ -3643,13 +3770,14 @@ def run(self, handler): # pragma: no cover class AppEngineServer(ServerAdapter): - """ Adapter for Google App Engine. """ + """Adapter for Google App Engine.""" + quiet = True def run(self, handler): - depr(0, 13, "AppEngineServer no longer required", - "Configure your application directly in your app.yaml") + depr(0, 13, 'AppEngineServer no longer required', 'Configure your application directly in your app.yaml') from google.appengine.ext.webapp import util + # A main() function in the handler script enables 'App Caching'. # Lets makes sure it is there. This _really_ improves performance. module = sys.modules.get('__main__') @@ -3659,12 +3787,13 @@ def run(self, handler): class TwistedServer(ServerAdapter): - """ Untested. """ + """Untested.""" def run(self, handler): - from twisted.web import server, wsgi - from twisted.python.threadpool import ThreadPool from twisted.internet import reactor + from twisted.python.threadpool import ThreadPool + from twisted.web import server, wsgi + thread_pool = ThreadPool() thread_pool.start() reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop) @@ -3675,25 +3804,27 @@ def run(self, handler): class DieselServer(ServerAdapter): - """ Untested. """ + """Untested.""" def run(self, handler): - depr(0, 13, "Diesel is not tested or supported and will be removed.") + depr(0, 13, 'Diesel is not tested or supported and will be removed.') from diesel.protocols.wsgi import WSGIApplication + app = WSGIApplication(handler, port=self.port) app.run() class GeventServer(ServerAdapter): - """ Untested. Options: + """Untested. 
Options: - * See gevent.wsgi.WSGIServer() documentation for more options. + * See gevent.wsgi.WSGIServer() documentation for more options. """ def run(self, handler): - from gevent import pywsgi, local + from gevent import local, pywsgi + if not isinstance(threading.local(), local.local): - msg = "Bottle requires gevent.monkey.patch_all() (before import)" + msg = 'Bottle requires gevent.monkey.patch_all() (before import)' raise RuntimeError(msg) if self.quiet: self.options['log'] = None @@ -3701,20 +3832,21 @@ def run(self, handler): server = pywsgi.WSGIServer(address, handler, **self.options) if 'BOTTLE_CHILD' in os.environ: import signal + signal.signal(signal.SIGINT, lambda s, f: server.stop()) server.serve_forever() class GunicornServer(ServerAdapter): - """ Untested. See http://gunicorn.org/configure.html for options. """ + """Untested. See http://gunicorn.org/configure.html for options.""" def run(self, handler): from gunicorn.app.base import BaseApplication - if self.host.startswith("unix:"): + if self.host.startswith('unix:'): config = {'bind': self.host} else: - config = {'bind': "%s:%d" % (self.host, self.port)} + config = {'bind': '%s:%d' % (self.host, self.port)} config.update(self.options) @@ -3730,19 +3862,20 @@ def load(self): class EventletServer(ServerAdapter): - """ Untested. Options: + """Untested. Options: - * `backlog` adjust the eventlet backlog parameter which is the maximum - number of queued connections. Should be at least 1; the maximum - value is system-dependent. - * `family`: (default is 2) socket family, optional. See socket - documentation for available families. + * `backlog` adjust the eventlet backlog parameter which is the maximum + number of queued connections. Should be at least 1; the maximum + value is system-dependent. + * `family`: (default is 2) socket family, optional. See socket + documentation for available families. 
""" def run(self, handler): - from eventlet import wsgi, listen, patcher + from eventlet import listen, patcher, wsgi + if not patcher.is_monkey_patched(os): - msg = "Bottle requires eventlet.monkey_patch() (before import)" + msg = 'Bottle requires eventlet.monkey_patch() (before import)' raise RuntimeError(msg) socket_args = {} for arg in ('backlog', 'family'): @@ -3752,43 +3885,50 @@ def run(self, handler): pass address = (self.host, self.port) try: - wsgi.server(listen(address, **socket_args), handler, - log_output=(not self.quiet)) + wsgi.server(listen(address, **socket_args), handler, log_output=(not self.quiet)) except TypeError: # Fallback, if we have old version of eventlet wsgi.server(listen(address), handler) class BjoernServer(ServerAdapter): - """ Fast server written in C: https://github.com/jonashaag/bjoern """ + """Fast server written in C: https://github.com/jonashaag/bjoern""" def run(self, handler): from bjoern import run + run(handler, self.host, self.port, reuse_port=True) + class AsyncioServerAdapter(ServerAdapter): - """ Extend ServerAdapter for adding custom event loop """ + """Extend ServerAdapter for adding custom event loop""" + def get_event_loop(self): pass + class AiohttpServer(AsyncioServerAdapter): - """ Asynchronous HTTP client/server framework for asyncio - https://pypi.python.org/pypi/aiohttp/ - https://pypi.org/project/aiohttp-wsgi/ + """Asynchronous HTTP client/server framework for asyncio + https://pypi.python.org/pypi/aiohttp/ + https://pypi.org/project/aiohttp-wsgi/ """ def get_event_loop(self): import asyncio + return asyncio.new_event_loop() def run(self, handler): import asyncio + from aiohttp_wsgi.wsgi import serve + self.loop = self.get_event_loop() asyncio.set_event_loop(self.loop) if 'BOTTLE_CHILD' in os.environ: import signal + signal.signal(signal.SIGINT, lambda s, f: self.loop.stop()) serve(handler, host=self.host, port=self.port) @@ -3796,16 +3936,19 @@ def run(self, handler): class 
AiohttpUVLoopServer(AiohttpServer): """uvloop - https://github.com/MagicStack/uvloop + https://github.com/MagicStack/uvloop """ + def get_event_loop(self): import uvloop + return uvloop.new_event_loop() + class AutoServer(ServerAdapter): - """ Untested. """ - adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, - CherootServer, WSGIRefServer] + """Untested.""" + + adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, CherootServer, WSGIRefServer] def run(self, handler): for sa in self.adapters: @@ -3844,29 +3987,32 @@ def run(self, handler): def load(target, **namespace): - """ Import a module or fetch an object from a module. + """Import a module or fetch an object from a module. - * ``package.module`` returns `module` as a module object. - * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. - * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. + * ``package.module`` returns `module` as a module object. + * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. + * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. - The last form accepts not only function calls, but any type of - expression. Keyword arguments passed to this function are available as - local variables. Example: ``import_string('re:compile(x)', x='[a-z]')`` + The last form accepts not only function calls, but any type of + expression. Keyword arguments passed to this function are available as + local variables. 
Example: ``import_string('re:compile(x)', x='[a-z]')`` """ - module, target = target.split(":", 1) if ':' in target else (target, None) - if module not in sys.modules: __import__(module) - if not target: return sys.modules[module] - if target.isalnum(): return getattr(sys.modules[module], target) + module, target = target.split(':', 1) if ':' in target else (target, None) + if module not in sys.modules: + __import__(module) + if not target: + return sys.modules[module] + if target.isalnum(): + return getattr(sys.modules[module], target) package_name = module.split('.')[0] namespace[package_name] = sys.modules[package_name] return eval('%s.%s' % (module, target), namespace) def load_app(target): - """ Load a bottle application from a module and make sure that the import - does not affect the current default application, but returns a separate - application object. See :func:`load` for the target parameter. """ + """Load a bottle application from a module and make sure that the import + does not affect the current default application, but returns a separate + application object. See :func:`load` for the target parameter.""" global NORUN NORUN, nr_old = True, NORUN tmp = default_app.push() # Create a new "default application" @@ -3881,35 +4027,40 @@ def load_app(target): _debug = debug -def run(app=None, - server='wsgiref', - host='127.0.0.1', - port=8080, - interval=1, - reloader=False, - quiet=False, - plugins=None, - debug=None, - config=None, **kargs): - """ Start a server instance. This method blocks until the server terminates. - - :param app: WSGI application or target string supported by - :func:`load_app`. (default: :func:`default_app`) - :param server: Server adapter to use. See :data:`server_names` keys - for valid names or pass a :class:`ServerAdapter` subclass. - (default: `wsgiref`) - :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on - all interfaces including the external one. 
(default: 127.0.0.1) - :param port: Server port to bind to. Values below 1024 require root - privileges. (default: 8080) - :param reloader: Start auto-reloading server? (default: False) - :param interval: Auto-reloader interval in seconds (default: 1) - :param quiet: Suppress output to stdout and stderr? (default: False) - :param options: Options passed to the server adapter. - """ - if NORUN: return +def run( + app=None, + server='wsgiref', + host='127.0.0.1', + port=8080, + interval=1, + reloader=False, + quiet=False, + plugins=None, + debug=None, + config=None, + **kargs, +): + """Start a server instance. This method blocks until the server terminates. + + :param app: WSGI application or target string supported by + :func:`load_app`. (default: :func:`default_app`) + :param server: Server adapter to use. See :data:`server_names` keys + for valid names or pass a :class:`ServerAdapter` subclass. + (default: `wsgiref`) + :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on + all interfaces including the external one. (default: 127.0.0.1) + :param port: Server port to bind to. Values below 1024 require root + privileges. (default: 8080) + :param reloader: Start auto-reloading server? (default: False) + :param interval: Auto-reloader interval in seconds (default: 1) + :param quiet: Suppress output to stdout and stderr? (default: False) + :param options: Options passed to the server adapter. + """ + if NORUN: + return if reloader and not os.environ.get('BOTTLE_CHILD'): import subprocess + fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock') environ = os.environ.copy() environ['BOTTLE_CHILD'] = 'true' @@ -3918,7 +4069,7 @@ def run(app=None, # If a package was loaded with `python -m`, then `sys.argv` needs to be # restored to the original value, or imports might break. 
See #1336 if getattr(sys.modules.get('__main__'), '__package__', None): - args[1:1] = ["-m", sys.modules['__main__'].__package__] + args[1:1] = ['-m', sys.modules['__main__'].__package__] try: os.close(fd) # We never write to this file @@ -3938,12 +4089,13 @@ def run(app=None, return try: - if debug is not None: _debug(debug) + if debug is not None: + _debug(debug) app = app or default_app() if isinstance(app, basestring): app = load_app(app) if not callable(app): - raise ValueError("Application is not callable: %r" % app) + raise ValueError('Application is not callable: %r' % app) for plugin in plugins or []: if isinstance(plugin, basestring): @@ -3960,18 +4112,16 @@ def run(app=None, if isinstance(server, type): server = server(host=host, port=port, **kargs) if not isinstance(server, ServerAdapter): - raise ValueError("Unknown or unsupported server: %r" % server) + raise ValueError('Unknown or unsupported server: %r' % server) server.quiet = server.quiet or quiet if not server.quiet: - _stderr("Bottle v%s server starting up (using %s)..." % - (__version__, repr(server))) - if server.host.startswith("unix:"): - _stderr("Listening on %s" % server.host) + _stderr('Bottle v%s server starting up (using %s)...' 
% (__version__, repr(server))) + if server.host.startswith('unix:'): + _stderr('Listening on %s' % server.host) else: - _stderr("Listening on http://%s:%d/" % - (server.host, server.port)) - _stderr("Hit Ctrl-C to quit.\n") + _stderr('Listening on http://%s:%d/' % (server.host, server.port)) + _stderr('Hit Ctrl-C to quit.\n') if reloader: lockfile = os.environ.get('BOTTLE_LOCKFILE') @@ -3987,7 +4137,8 @@ def run(app=None, except (SystemExit, MemoryError): raise except: - if not reloader: raise + if not reloader: + raise if not getattr(server, 'quiet', quiet): print_exc() time.sleep(interval) @@ -3995,8 +4146,8 @@ def run(app=None, class FileCheckerThread(threading.Thread): - """ Interrupt main-thread as soon as a changed module file is detected, - the lockfile gets deleted or gets too old. """ + """Interrupt main-thread as soon as a changed module file is detected, + the lockfile gets deleted or gets too old.""" def __init__(self, lockfile, interval): threading.Thread.__init__(self) @@ -4012,12 +4163,13 @@ def run(self): for module in list(sys.modules.values()): path = getattr(module, '__file__', '') or '' - if path[-4:] in ('.pyo', '.pyc'): path = path[:-1] - if path and exists(path): files[path] = mtime(path) + if path[-4:] in ('.pyo', '.pyc'): + path = path[:-1] + if path and exists(path): + files[path] = mtime(path) while not self.status: - if not exists(self.lockfile)\ - or mtime(self.lockfile) < time.time() - self.interval - 5: + if not exists(self.lockfile) or mtime(self.lockfile) < time.time() - self.interval - 5: self.status = 'error' thread.interrupt_main() for path, lmtime in list(files.items()): @@ -4031,10 +4183,12 @@ def __enter__(self): self.start() def __exit__(self, exc_type, *_): - if not self.status: self.status = 'exit' # silent exit + if not self.status: + self.status = 'exit' # silent exit self.join() return exc_type is not None and issubclass(exc_type, KeyboardInterrupt) + 
############################################################################### # Template Adapters ############################################################ ############################################################################### @@ -4045,17 +4199,14 @@ class TemplateError(BottleException): class BaseTemplate(object): - """ Base class and minimal API for template adapters """ + """Base class and minimal API for template adapters""" + extensions = ['tpl', 'html', 'thtml', 'stpl'] - settings = {} #used in prepare() - defaults = {} #used in render() - - def __init__(self, - source=None, - name=None, - lookup=None, - encoding='utf8', **settings): - """ Create a new template. + settings = {} # used in prepare() + defaults = {} # used in render() + + def __init__(self, source=None, name=None, lookup=None, encoding='utf8', **settings): + """Create a new template. If the source parameter (str or buffer) is missing, the name argument is used to guess a template filename. Subclasses can assume that self.source and/or self.filename are set. Both are strings. @@ -4082,27 +4233,33 @@ def __init__(self, @classmethod def search(cls, name, lookup=None): - """ Search name in all directories specified in lookup. - First without, then with common extensions. Return first hit. """ + """Search name in all directories specified in lookup. + First without, then with common extensions. 
Return first hit.""" if not lookup: - raise depr(0, 12, "Empty template lookup path.", "Configure a template lookup path.") + raise depr(0, 12, 'Empty template lookup path.', 'Configure a template lookup path.') if os.path.isabs(name): - raise depr(0, 12, "Use of absolute path for template name.", - "Refer to templates with names or paths relative to the lookup path.") + raise depr( + 0, + 12, + 'Use of absolute path for template name.', + 'Refer to templates with names or paths relative to the lookup path.', + ) for spath in lookup: spath = os.path.abspath(spath) + os.sep fname = os.path.abspath(os.path.join(spath, name)) - if not fname.startswith(spath): continue - if os.path.isfile(fname): return fname + if not fname.startswith(spath): + continue + if os.path.isfile(fname): + return fname for ext in cls.extensions: if os.path.isfile('%s.%s' % (fname, ext)): return '%s.%s' % (fname, ext) @classmethod def global_config(cls, key, *args): - """ This reads or sets the global settings stored in class.settings. """ + """This reads or sets the global settings stored in class.settings.""" if args: cls.settings = cls.settings.copy() # Make settings local to class cls.settings[key] = args[0] @@ -4110,14 +4267,14 @@ def global_config(cls, key, *args): return cls.settings[key] def prepare(self, **options): - """ Run preparations (parsing, caching, ...). + """Run preparations (parsing, caching, ...). It should be possible to call this again to refresh a template or to update settings. """ raise NotImplementedError def render(self, *args, **kwargs): - """ Render the template with the specified local variables and return + """Render the template with the specified local variables and return a single byte or unicode string. If it is a byte string, the encoding must match self.encoding. This method must be thread-safe! 
Local variables may be provided in dictionaries (args) @@ -4128,17 +4285,16 @@ def render(self, *args, **kwargs): class MakoTemplate(BaseTemplate): def prepare(self, **options): - from mako.template import Template from mako.lookup import TemplateLookup + from mako.template import Template + options.update({'input_encoding': self.encoding}) options.setdefault('format_exceptions', bool(DEBUG)) lookup = TemplateLookup(directories=self.lookup, **options) if self.source: self.tpl = Template(self.source, lookup=lookup, **options) else: - self.tpl = Template(uri=self.name, - filename=self.filename, - lookup=lookup, **options) + self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) def render(self, *args, **kwargs): for dictarg in args: @@ -4151,6 +4307,7 @@ def render(self, *args, **kwargs): class CheetahTemplate(BaseTemplate): def prepare(self, **options): from Cheetah.Template import Template + self.context = threading.local() self.context.vars = {} options['searchList'] = [self.context.vars] @@ -4172,10 +4329,14 @@ def render(self, *args, **kwargs): class Jinja2Template(BaseTemplate): def prepare(self, filters=None, tests=None, globals={}, **kwargs): from jinja2 import Environment, FunctionLoader + self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) - if filters: self.env.filters.update(filters) - if tests: self.env.tests.update(tests) - if globals: self.env.globals.update(globals) + if filters: + self.env.filters.update(filters) + if tests: + self.env.tests.update(tests) + if globals: + self.env.globals.update(globals) if self.source: self.tpl = self.env.from_string(self.source) else: @@ -4193,16 +4354,14 @@ def loader(self, name): fname = name else: fname = self.search(name, self.lookup) - if not fname: return - with open(fname, "rb") as f: + if not fname: + return + with open(fname, 'rb') as f: return (f.read().decode(self.encoding), fname, lambda: False) class SimpleTemplate(BaseTemplate): - def prepare(self, - 
escape_func=html_escape, - noescape=False, - syntax=None, **ka): + def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka): self.cache = {} enc = self.encoding self._str = lambda x: touni(x, enc) @@ -4243,28 +4402,30 @@ def _include(self, _env, _name=None, **kwargs): def execute(self, _stdout, kwargs): env = self.defaults.copy() env.update(kwargs) - env.update({ - '_stdout': _stdout, - '_printlist': _stdout.extend, - 'include': functools.partial(self._include, env), - 'rebase': functools.partial(self._rebase, env), - '_rebase': None, - '_str': self._str, - '_escape': self._escape, - 'get': env.get, - 'setdefault': env.setdefault, - 'defined': env.__contains__ - }) + env.update( + { + '_stdout': _stdout, + '_printlist': _stdout.extend, + 'include': functools.partial(self._include, env), + 'rebase': functools.partial(self._rebase, env), + '_rebase': None, + '_str': self._str, + '_escape': self._escape, + 'get': env.get, + 'setdefault': env.setdefault, + 'defined': env.__contains__, + } + ) exec(self.co, env) if env.get('_rebase'): subtpl, rargs = env.pop('_rebase') - rargs['base'] = ''.join(_stdout) #copy stdout + rargs['base'] = ''.join(_stdout) # copy stdout del _stdout[:] # clear stdout return self._include(env, subtpl, **rargs) return env def render(self, *args, **kwargs): - """ Render the template using keyword arguments as local variables. """ + """Render the template using keyword arguments as local variables.""" env = {} stdout = [] for dictarg in args: @@ -4279,13 +4440,14 @@ class StplSyntaxError(TemplateError): class StplParser(object): - """ Parser for stpl templates. """ + """Parser for stpl templates.""" + _re_cache = {} #: Cache for compiled re patterns # This huge pile of voodoo magic splits python code into 8 different tokens. 
# We use the verbose (?x) regex mode to make this more manageable - _re_tok = r'''( + _re_tok = r"""( [urbURB]* (?: ''(?!') |""(?!") @@ -4296,11 +4458,11 @@ class StplParser(object): |'{3}(?:[^\\]|\\.|\n)+?'{3} |"{3}(?:[^\\]|\\.|\n)+?"{3} ) - )''' + )""" _re_inl = _re_tok.replace(r'|\n', '') # We re-use this string pattern later - _re_tok += r''' + _re_tok += r""" # 2: Comments (until end of line, but not the newline itself) |(\#.*) @@ -4320,19 +4482,18 @@ class StplParser(object): # 9: And finally, a single newline. The 10th token is 'everything else' |(\r?\n) - ''' + """ # Match the start tokens of code areas in a template - _re_split = r'''(?m)^[ \t]*(\\?)((%(line_start)s)|(%(block_start)s))''' + _re_split = r"""(?m)^[ \t]*(\\?)((%(line_start)s)|(%(block_start)s))""" # Match inline statements (may contain python strings) - _re_inl = r'''%%(inline_start)s((?:%s|[^'"\n])*?)%%(inline_end)s''' % _re_inl + _re_inl = r"""%%(inline_start)s((?:%s|[^'"\n])*?)%%(inline_end)s""" % _re_inl # add the flag in front of the regexp to avoid Deprecation warning (see Issue #949) # verbose and dot-matches-newline mode _re_tok = '(?mx)' + _re_tok _re_inl = '(?mx)' + _re_inl - default_syntax = '<% %> % {{ }}' def __init__(self, source, syntax=None, encoding='utf8'): @@ -4344,7 +4505,7 @@ def __init__(self, source, syntax=None, encoding='utf8'): self.paren_depth = 0 def get_syntax(self): - """ Tokens as a space separated string (default: <% %> % {{ }}) """ + """Tokens as a space separated string (default: <% %> % {{ }})""" return self._syntax def set_syntax(self, syntax): @@ -4362,25 +4523,24 @@ def set_syntax(self, syntax): syntax = property(get_syntax, set_syntax) def translate(self): - if self.offset: raise RuntimeError('Parser is a one time instance.') + if self.offset: + raise RuntimeError('Parser is a one time instance.') while True: m = self.re_split.search(self.source, pos=self.offset) if m: - text = self.source[self.offset:m.start()] + text = self.source[self.offset : 
m.start()] self.text_buffer.append(text) self.offset = m.end() if m.group(1): # Escape syntax - line, sep, _ = self.source[self.offset:].partition('\n') - self.text_buffer.append(self.source[m.start():m.start(1)] + - m.group(2) + line + sep) + line, sep, _ = self.source[self.offset :].partition('\n') + self.text_buffer.append(self.source[m.start() : m.start(1)] + m.group(2) + line + sep) self.offset += len(line + sep) continue self.flush_text() - self.offset += self.read_code(self.source[self.offset:], - multiline=bool(m.group(4))) + self.offset += self.read_code(self.source[self.offset :], multiline=bool(m.group(4))) else: break - self.text_buffer.append(self.source[self.offset:]) + self.text_buffer.append(self.source[self.offset :]) self.flush_text() return ''.join(self.code_buffer) @@ -4394,7 +4554,7 @@ def read_code(self, pysource, multiline): offset = len(pysource) self.write_code(code_line.strip(), comment) break - code_line += pysource[offset:m.start()] + code_line += pysource[offset : m.start()] offset = m.end() _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups() if self.paren_depth > 0 and (_blk1 or _blk2): # a if b else c @@ -4423,8 +4583,10 @@ def read_code(self, pysource, multiline): code_line = _blk2 self.indent_mod -= 1 elif _cend: # The end-code-block template token (usually '%>') - if multiline: multiline = False - else: code_line += _cend + if multiline: + multiline = False + else: + code_line += _cend elif _end: self.indent -= 1 self.indent_mod += 1 @@ -4440,19 +4602,23 @@ def read_code(self, pysource, multiline): def flush_text(self): text = ''.join(self.text_buffer) del self.text_buffer[:] - if not text: return + if not text: + return parts, pos, nl = [], 0, '\\\n' + ' ' * self.indent for m in self.re_inl.finditer(text): - prefix, pos = text[pos:m.start()], m.end() + prefix, pos = text[pos : m.start()], m.end() if prefix: parts.append(nl.join(map(repr, prefix.splitlines(True)))) - if prefix.endswith('\n'): parts[-1] += nl + if 
prefix.endswith('\n'): + parts[-1] += nl parts.append(self.process_inline(m.group(1).strip())) if pos < len(text): prefix = text[pos:] lines = prefix.splitlines(True) - if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3] - elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4] + if lines[-1].endswith('\\\\\n'): + lines[-1] = lines[-1][:-3] + elif lines[-1].endswith('\\\\\r\n'): + lines[-1] = lines[-1][:-4] parts.append(nl.join(map(repr, lines))) code = '_printlist((%s,))' % ', '.join(parts) self.lineno += code.count('\n') + 1 @@ -4460,7 +4626,8 @@ def flush_text(self): @staticmethod def process_inline(chunk): - if chunk[0] == '!': return '_str(%s)' % chunk[1:] + if chunk[0] == '!': + return '_str(%s)' % chunk[1:] return '_escape(%s)' % chunk def write_code(self, line, comment=''): @@ -4486,8 +4653,9 @@ def template(*args, **kwargs): settings = kwargs.pop('template_settings', {}) if isinstance(tpl, adapter): TEMPLATES[tplid] = tpl - if settings: TEMPLATES[tplid].prepare(**settings) - elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl: + if settings: + TEMPLATES[tplid].prepare(**settings) + elif '\n' in tpl or '{' in tpl or '%' in tpl or '$' in tpl: TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings) else: TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings) @@ -4497,24 +4665,22 @@ def template(*args, **kwargs): mako_template = functools.partial(template, template_adapter=MakoTemplate) -cheetah_template = functools.partial(template, - template_adapter=CheetahTemplate) +cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) jinja2_template = functools.partial(template, template_adapter=Jinja2Template) def view(tpl_name, **defaults): - """ Decorator: renders a template for a handler. 
- The handler can control its behavior like that: - - - return a dict of template vars to fill out the template - - return something other than a dict and the view decorator will not - process the template, but return the handler result as is. - This includes returning a HTTPResponse(dict) to get, - for instance, JSON with autojson or other castfilters. + """Decorator: renders a template for a handler. + The handler can control its behavior like that: + + - return a dict of template vars to fill out the template + - return something other than a dict and the view decorator will not + process the template, but return the handler result as is. + This includes returning a HTTPResponse(dict) to get, + for instance, JSON with autojson or other castfilters. """ def decorator(func): - @functools.wraps(func) def wrapper(*args, **kwargs): result = func(*args, **kwargs) @@ -4547,16 +4713,16 @@ def wrapper(*args, **kwargs): #: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') HTTP_CODES = httplib.responses.copy() HTTP_CODES[418] = "I'm a teapot" # RFC 2324 -HTTP_CODES[428] = "Precondition Required" -HTTP_CODES[429] = "Too Many Requests" -HTTP_CODES[431] = "Request Header Fields Too Large" -HTTP_CODES[451] = "Unavailable For Legal Reasons" # RFC 7725 -HTTP_CODES[511] = "Network Authentication Required" -_HTTP_STATUS_LINES = dict((k, '%d %s' % (k, v)) - for (k, v) in HTTP_CODES.items()) +HTTP_CODES[428] = 'Precondition Required' +HTTP_CODES[429] = 'Too Many Requests' +HTTP_CODES[431] = 'Request Header Fields Too Large' +HTTP_CODES[451] = 'Unavailable For Legal Reasons' # RFC 7725 +HTTP_CODES[511] = 'Network Authentication Required' +_HTTP_STATUS_LINES = dict((k, '%d %s' % (k, v)) for (k, v) in HTTP_CODES.items()) #: The default template used for error pages. 
Override with @error() -ERROR_PAGE_TEMPLATE = """ +ERROR_PAGE_TEMPLATE = ( + """ %%try: %%from %s import DEBUG, request @@ -4594,7 +4760,9 @@ def wrapper(*args, **kwargs): ImportError: Could not generate the error page. Please add bottle to the import path. %%end -""" % __name__ +""" + % __name__ +) #: A thread-safe instance of :class:`LocalRequest`. If accessed from within a #: request callback, this instance always refers to the *current* request @@ -4614,8 +4782,7 @@ def wrapper(*args, **kwargs): #: A virtual package that redirects import statements. #: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`. -ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else - __name__ + ".ext", 'bottle_%s').module +ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__ + '.ext', 'bottle_%s').module def _main(argv): # pragma: no coverage @@ -4630,7 +4797,7 @@ def _cli_error(cli_msg): print('Bottle %s' % __version__) sys.exit(0) if not args.app: - _cli_error("No application entry point specified.") + _cli_error('No application entry point specified.') sys.path.insert(0, '.') sys.modules.setdefault('bottle', sys.modules['__main__']) @@ -4652,9 +4819,9 @@ def _cli_error(cli_msg): except configparser.Error as parse_error: _cli_error(parse_error) except IOError: - _cli_error("Unable to read config file %r" % cfile) + _cli_error('Unable to read config file %r' % cfile) except (UnicodeError, TypeError, ValueError) as error: - _cli_error("Unable to parse config file %r: %s" % (cfile, error)) + _cli_error('Unable to parse config file %r: %s' % (cfile, error)) for cval in args.param or []: if '=' in cval: @@ -4662,14 +4829,16 @@ def _cli_error(cli_msg): else: config[cval] = True - run(args.app, + run( + args.app, host=host, port=int(port), server=args.server, reloader=args.reload, plugins=args.plugin, debug=args.debug, - config=config) + config=config, + ) def main(): diff --git a/build/fake_kms_provider_server.py 
b/build/fake_kms_provider_server.py index abe8779e31e..c934cd00cf9 100644 --- a/build/fake_kms_provider_server.py +++ b/build/fake_kms_provider_server.py @@ -19,36 +19,26 @@ from typing import Protocol class _RequestParams(Protocol): - - def __getitem__(self, key: str) -> str: - ... + def __getitem__(self, key: str) -> str: ... @overload - def get(self, key: str) -> 'str | None': - ... + def get(self, key: str) -> 'str | None': ... @overload - def get(self, key: str, default: str) -> str: - ... + def get(self, key: str, default: str) -> str: ... class _HeadersDict(dict[str, str]): - - def raw(self, key: str) -> 'bytes | None': - ... + def raw(self, key: str) -> 'bytes | None': ... class _Request(Protocol): - @property - def query(self) -> _RequestParams: - ... + def query(self) -> _RequestParams: ... @property - def params(self) -> _RequestParams: - ... + def params(self) -> _RequestParams: ... @property - def headers(self) -> _HeadersDict: - ... + def headers(self) -> _HeadersDict: ... 
request = cast('_Request', None) @@ -59,9 +49,7 @@ def parse_qs(qs: str) -> 'dict[str, str]': return dict(bottle._parse_qsl(qs)) # type: ignore -_HandlerFuncT = Callable[ - [], - 'None|str|bytes|dict[str, Any]|bottle.BaseResponse|Iterable[bytes|str]'] +_HandlerFuncT = Callable[[], 'None|str|bytes|dict[str, Any]|bottle.BaseResponse|Iterable[bytes|str]'] def handle_asserts(fn: _HandlerFuncT) -> _HandlerFuncT: @@ -73,9 +61,7 @@ def wrapped(): return fn() except AssertionError as e: traceback.print_exc() - return bottle.HTTPResponse(status=400, - body=json.dumps({'error': - list(e.args)})) + return bottle.HTTPResponse(status=400, body=json.dumps({'error': list(e.args)})) return wrapped @@ -83,20 +69,21 @@ def wrapped(): def test_params() -> 'dict[str, str]': return parse_qs(request.headers.get('X-MongoDB-HTTP-TestParams', '')) + @kms_provider.get('/computeMetadata/v1/instance/service-accounts/default/token') @handle_asserts def get_gcp_token(): - metadata_header = request.headers.get("Metadata-Flavor") + metadata_header = request.headers.get('Metadata-Flavor') assert metadata_header == 'Google' case = test_params().get('case') print('Case is:', case) - if case == '404': + if case == '404': return HTTPResponse(status=404) - + if case == 'bad-json': return b'{"access-token": }' - + if case == 'empty-json': return b'{}' @@ -107,11 +94,9 @@ def get_gcp_token(): return _slow() assert case in (None, ''), 'Unknown HTTP test case "{}"'.format(case) - - return { - 'access_token' : 'google-cookie', - 'token_type' : 'Bearer' - } + + return {'access_token': 'google-cookie', 'token_type': 'Bearer'} + @kms_provider.get('/metadata/identity/oauth2/token') @handle_asserts @@ -155,10 +140,12 @@ def _gen_giant() -> Iterable[bytes]: "Generate a giant message" yield b'{ "item": [' for _ in range(1024 * 256): - yield (b'null, null, null, null, null, null, null, null, null, null, ' - b'null, null, null, null, null, null, null, null, null, null, ' - b'null, null, null, null, null, null, null, 
null, null, null, ' - b'null, null, null, null, null, null, null, null, null, null, ') + yield ( + b'null, null, null, null, null, null, null, null, null, null, ' + b'null, null, null, null, null, null, null, null, null, null, ' + b'null, null, null, null, null, null, null, null, null, null, ' + b'null, null, null, null, null, null, null, null, null, null, ' + ) yield b' null ] }' yield b'\n' @@ -174,7 +161,8 @@ def _slow() -> Iterable[bytes]: if __name__ == '__main__': print( - 'RECOMMENDED: Run this script using bottle.py (e.g. [{} {}/bottle.py fake_kms_provider_server:kms_provider])' - .format(sys.executable, - Path(__file__).resolve().parent)) + 'RECOMMENDED: Run this script using bottle.py (e.g. [{} {}/bottle.py fake_kms_provider_server:kms_provider])'.format( + sys.executable, Path(__file__).resolve().parent + ) + ) kms_provider.run() diff --git a/build/generate-future-functions.py b/build/generate-future-functions.py index 1fa42a5f52d..aee30369fad 100644 --- a/build/generate-future-functions.py +++ b/build/generate-future-functions.py @@ -30,477 +30,552 @@ """ from collections import namedtuple -from os.path import basename, dirname, join as joinpath, normpath +from os.path import basename, dirname, normpath +from os.path import join as joinpath # Please "pip install jinja2". from jinja2 import Environment, FileSystemLoader this_dir = dirname(__file__) template_dir = joinpath(this_dir, 'future_function_templates') -mock_server_dir = normpath( - joinpath(this_dir, '../src/libmongoc/tests/mock_server')) +mock_server_dir = normpath(joinpath(this_dir, '../src/libmongoc/tests/mock_server')) # Add additional types here. Use typedefs for derived types so they can # be named with one symbol. -typedef = namedtuple("typedef", ["name", "typedef"]) +typedef = namedtuple('typedef', ['name', 'typedef']) # These are typedef'ed if necessary in future-value.h, and added to the union # of possible future_value_t.value types. 
future_value_t getters and setters # are generated for all types, as well as future_t getters. typedef_list = [ # Fundamental. - typedef("bool", None), - typedef("char_ptr", "char *"), - typedef("char_ptr_ptr", "char **"), - typedef("int", None), - typedef("int64_t", None), - typedef("size_t", None), - typedef("ssize_t", None), - typedef("uint32_t", None), - typedef("void_ptr", "void *"), - + typedef('bool', None), + typedef('char_ptr', 'char *'), + typedef('char_ptr_ptr', 'char **'), + typedef('int', None), + typedef('int64_t', None), + typedef('size_t', None), + typedef('ssize_t', None), + typedef('uint32_t', None), + typedef('void_ptr', 'void *'), # Const fundamental. - typedef("const_char_ptr", "const char *"), - typedef("bool_ptr", "bool *"), - + typedef('const_char_ptr', 'const char *'), + typedef('bool_ptr', 'bool *'), # libbson. - typedef("bson_error_ptr", "bson_error_t *"), - typedef("bson_ptr", "bson_t *"), - + typedef('bson_error_ptr', 'bson_error_t *'), + typedef('bson_ptr', 'bson_t *'), # Const libbson. - typedef("const_bson_ptr", "const bson_t *"), - typedef("const_bson_ptr_ptr", "const bson_t **"), - + typedef('const_bson_ptr', 'const bson_t *'), + typedef('const_bson_ptr_ptr', 'const bson_t **'), # libmongoc. 
- typedef("mongoc_async_ptr", "mongoc_async_t *"), - typedef("mongoc_bulk_operation_ptr", "mongoc_bulk_operation_t *"), - typedef("mongoc_client_ptr", "mongoc_client_t *"), - typedef("mongoc_client_pool_ptr", "mongoc_client_pool_t *"), - typedef("mongoc_collection_ptr", "mongoc_collection_t *"), - typedef("mongoc_cluster_ptr", "mongoc_cluster_t *"), - typedef("mongoc_cmd_parts_ptr", "mongoc_cmd_parts_t *"), - typedef("mongoc_cursor_ptr", "mongoc_cursor_t *"), - typedef("mongoc_database_ptr", "mongoc_database_t *"), - typedef("mongoc_gridfs_file_ptr", "mongoc_gridfs_file_t *"), - typedef("mongoc_gridfs_ptr", "mongoc_gridfs_t *"), - typedef("mongoc_insert_flags_t", None), - typedef("mongoc_iovec_ptr", "mongoc_iovec_t *"), - typedef("mongoc_server_stream_ptr", "mongoc_server_stream_t *"), - typedef("mongoc_query_flags_t", None), - typedef("mongoc_server_description_ptr", "mongoc_server_description_t *"), - typedef("mongoc_ss_optype_t", None), - typedef("mongoc_topology_ptr", "mongoc_topology_t *"), - typedef("mongoc_write_concern_ptr", "mongoc_write_concern_t *"), - typedef("mongoc_change_stream_ptr", "mongoc_change_stream_t *"), - typedef("mongoc_remove_flags_t", None), - + typedef('mongoc_async_ptr', 'mongoc_async_t *'), + typedef('mongoc_bulk_operation_ptr', 'mongoc_bulk_operation_t *'), + typedef('mongoc_client_ptr', 'mongoc_client_t *'), + typedef('mongoc_client_pool_ptr', 'mongoc_client_pool_t *'), + typedef('mongoc_collection_ptr', 'mongoc_collection_t *'), + typedef('mongoc_cluster_ptr', 'mongoc_cluster_t *'), + typedef('mongoc_cmd_parts_ptr', 'mongoc_cmd_parts_t *'), + typedef('mongoc_cursor_ptr', 'mongoc_cursor_t *'), + typedef('mongoc_database_ptr', 'mongoc_database_t *'), + typedef('mongoc_gridfs_file_ptr', 'mongoc_gridfs_file_t *'), + typedef('mongoc_gridfs_ptr', 'mongoc_gridfs_t *'), + typedef('mongoc_insert_flags_t', None), + typedef('mongoc_iovec_ptr', 'mongoc_iovec_t *'), + typedef('mongoc_server_stream_ptr', 'mongoc_server_stream_t *'), + 
typedef('mongoc_query_flags_t', None), + typedef('mongoc_server_description_ptr', 'mongoc_server_description_t *'), + typedef('mongoc_ss_optype_t', None), + typedef('mongoc_topology_ptr', 'mongoc_topology_t *'), + typedef('mongoc_write_concern_ptr', 'mongoc_write_concern_t *'), + typedef('mongoc_change_stream_ptr', 'mongoc_change_stream_t *'), + typedef('mongoc_remove_flags_t', None), # Const libmongoc. - typedef("const_mongoc_find_and_modify_opts_ptr", - "const mongoc_find_and_modify_opts_t *"), - typedef("const_mongoc_iovec_ptr", "const mongoc_iovec_t *"), - typedef("const_mongoc_read_prefs_ptr", "const mongoc_read_prefs_t *"), - typedef("const_mongoc_write_concern_ptr", - "const mongoc_write_concern_t *"), - typedef("const_mongoc_ss_log_context_ptr", - "const mongoc_ss_log_context_t *"), - typedef("mongoc_index_model_t_ptr_const_ptr", "mongoc_index_model_t *const *") + typedef('const_mongoc_find_and_modify_opts_ptr', 'const mongoc_find_and_modify_opts_t *'), + typedef('const_mongoc_iovec_ptr', 'const mongoc_iovec_t *'), + typedef('const_mongoc_read_prefs_ptr', 'const mongoc_read_prefs_t *'), + typedef('const_mongoc_write_concern_ptr', 'const mongoc_write_concern_t *'), + typedef('const_mongoc_ss_log_context_ptr', 'const mongoc_ss_log_context_t *'), + typedef('mongoc_index_model_t_ptr_const_ptr', 'mongoc_index_model_t *const *'), ] type_list = [T.name for T in typedef_list] type_list_with_void = type_list + ['void'] -param = namedtuple("param", ["type_name", "name"]) -future_function = namedtuple("future_function", ["ret_type", "name", "params"]) +param = namedtuple('param', ['type_name', 'name']) +future_function = namedtuple('future_function', ['ret_type', 'name', 'params']) # Add additional functions to be tested here. For a name like "cursor_next", we # generate two functions: future_cursor_next to prepare the future_t and launch # a background thread, and background_cursor_next to run on the thread and # resolve the future. 
future_functions = [ - future_function("void", - "mongoc_async_run", - [param("mongoc_async_ptr", "async")]), - - future_function("uint32_t", - "mongoc_bulk_operation_execute", - [param("mongoc_bulk_operation_ptr", "bulk"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_database_read_command_with_opts", - [param("mongoc_database_ptr", "database"), - param("const_bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_database_read_write_command_with_opts", - [param("mongoc_database_ptr", "database"), - param("const_bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_database_write_command_with_opts", - [param("mongoc_database_ptr", "database"), - param("const_bson_ptr", "command"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_client_command_simple", - [param("mongoc_client_ptr", "client"), - param("const_char_ptr", "db_name"), - param("const_bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_client_command_with_opts", - [param("mongoc_client_ptr", "client"), - param("const_char_ptr", "db_name"), - param("const_bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_client_read_command_with_opts", - [param("mongoc_client_ptr", "client"), - param("const_char_ptr", "db_name"), - param("const_bson_ptr", 
"command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_client_write_command_with_opts", - [param("mongoc_client_ptr", "client"), - param("const_char_ptr", "db_name"), - param("const_bson_ptr", "command"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_client_read_write_command_with_opts", - [param("mongoc_client_ptr", "client"), - param("const_char_ptr", "db_name"), - param("const_bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("mongoc_change_stream_ptr", - "mongoc_client_watch", - [param("mongoc_client_ptr", "client"), - param("const_bson_ptr", "pipeline"), - param("const_bson_ptr", "opts")]), - - future_function("mongoc_cursor_ptr", - "mongoc_collection_aggregate", - [param("mongoc_collection_ptr", "collection"), - param("mongoc_query_flags_t", "flags"), - param("const_bson_ptr", "pipeline"), - param("const_bson_ptr", "options"), - param("const_mongoc_read_prefs_ptr", "read_prefs")]), - - future_function("bool", - "mongoc_collection_create_indexes_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("mongoc_index_model_t_ptr_const_ptr", "models"), - param("size_t", "num_models"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_drop_index_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("const_char_ptr", "index_name"), - param("const_bson_ptr", "opts"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_drop_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", 
"opts"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_find_and_modify_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", "query"), - param("const_mongoc_find_and_modify_opts_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_find_and_modify", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", "query"), - param("const_bson_ptr", "sort"), - param("const_bson_ptr", "update"), - param("const_bson_ptr", "fields"), - param("bool", "_remove"), - param("bool", "upsert"), - param("bool", "_new"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("mongoc_cursor_ptr", - "mongoc_collection_find_indexes_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", "opts")]), - - future_function("bool", - "mongoc_collection_insert_many", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr_ptr", "documents"), - param("size_t", "n_documents"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_insert_one", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", "document"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_read_command_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_read_write_command_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", 
"read_prefs"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_write_command_with_opts", - [param("mongoc_collection_ptr", "collection"), - param("const_bson_ptr", "command"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_cluster_run_command_parts", - [param("mongoc_cluster_ptr", "cluster"), - param("mongoc_server_stream_ptr", "server_stream"), - param("mongoc_cmd_parts_ptr", "parts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("void", - "mongoc_cursor_destroy", - [param("mongoc_cursor_ptr", "cursor")]), - - future_function("bool", - "mongoc_cursor_next", - [param("mongoc_cursor_ptr", "cursor"), - param("const_bson_ptr_ptr", "doc")]), - - future_function("char_ptr_ptr", - "mongoc_client_get_database_names_with_opts", - [param("mongoc_client_ptr", "client"), - param("const_bson_ptr", "opts"), - param("bson_error_ptr", "error")]), - - future_function("mongoc_server_description_ptr", - "mongoc_client_select_server", - [param("mongoc_client_ptr", "client"), - param("bool", "for_writes"), - param("const_mongoc_read_prefs_ptr", "prefs"), - param("bson_error_ptr", "error")]), - - future_function("void", - "mongoc_client_destroy", - [param("mongoc_client_ptr", "client")]), - - future_function("void", - "mongoc_client_pool_destroy", - [param("mongoc_client_pool_ptr", "pool")]), - - future_function("bool", - "mongoc_database_command_simple", - [param("mongoc_database_ptr", "database"), - param("bson_ptr", "command"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_database_drop_with_opts", - [param("mongoc_database_ptr", "database"), - param("const_bson_ptr", "opts"), - param("bson_error_ptr", "error")]), - - 
future_function("char_ptr_ptr", - "mongoc_database_get_collection_names_with_opts", - [param("mongoc_database_ptr", "database"), - param("const_bson_ptr", "opts"), - param("bson_error_ptr", "error")]), - - future_function("mongoc_change_stream_ptr", - "mongoc_database_watch", - [param("mongoc_database_ptr", "database"), - param("const_bson_ptr", "pipeline"), - param("const_bson_ptr", "opts")]), - - future_function("ssize_t", - "mongoc_gridfs_file_readv", - [param("mongoc_gridfs_file_ptr", "file"), - param("mongoc_iovec_ptr", "iov"), - param("size_t", "iovcnt"), - param("size_t", "min_bytes"), - param("uint32_t", "timeout_msec")]), - - future_function("bool", - "mongoc_gridfs_file_remove", - [param("mongoc_gridfs_file_ptr", "file"), - param("bson_error_ptr", "error")]), - - future_function("int", - "mongoc_gridfs_file_seek", - [param("mongoc_gridfs_file_ptr", "file"), - param("int64_t", "delta"), - param("int", "whence")]), - - future_function("ssize_t", - "mongoc_gridfs_file_writev", - [param("mongoc_gridfs_file_ptr", "file"), - param("const_mongoc_iovec_ptr", "iov"), - param("size_t", "iovcnt"), - param("uint32_t", "timeout_msec")]), - - future_function("mongoc_gridfs_file_ptr", - "mongoc_gridfs_find_one_with_opts", - [param("mongoc_gridfs_ptr", "gridfs"), - param("const_bson_ptr", "filter"), - param("const_bson_ptr", "opts"), - param("bson_error_ptr", "error")]), - - future_function("mongoc_server_description_ptr", - "mongoc_topology_select", - [param("mongoc_topology_ptr", "topology"), - param("mongoc_ss_optype_t", "optype"), - param("const_mongoc_ss_log_context_ptr", "log_context"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("bool_ptr", "must_use_primary"), - param("bson_error_ptr", "error")]), - - future_function("mongoc_gridfs_ptr", - "mongoc_client_get_gridfs", - [param("mongoc_client_ptr", "client"), - param("const_char_ptr", "db"), - param("const_char_ptr", "prefix"), - param("bson_error_ptr", "error")]), - - 
future_function("mongoc_change_stream_ptr", - "mongoc_collection_watch", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "pipeline"), - param("const_bson_ptr", "opts")]), - - future_function("bool", - "mongoc_change_stream_next", - [param("mongoc_change_stream_ptr", "stream"), - param("const_bson_ptr_ptr", "bson")]), - - future_function("void", - "mongoc_change_stream_destroy", - [param("mongoc_change_stream_ptr", "stream")]), - - future_function("bool", - "mongoc_collection_delete_one", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "selector"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_delete_many", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "selector"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_remove", - [param("mongoc_collection_ptr", "coll"), - param("mongoc_remove_flags_t", "flags"), - param("const_bson_ptr", "selector"), - param("const_mongoc_write_concern_ptr", "write_concern"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_update_one", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "selector"), - param("const_bson_ptr", "update"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_update_many", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "selector"), - param("const_bson_ptr", "update"), - param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("bool", - "mongoc_collection_replace_one", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "selector"), - param("const_bson_ptr", "replacement"), - 
param("const_bson_ptr", "opts"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("int64_t", - "mongoc_collection_count_documents", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "filter"), - param("const_bson_ptr", "opts"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), - - future_function("int64_t", - "mongoc_collection_estimated_document_count", - [param("mongoc_collection_ptr", "coll"), - param("const_bson_ptr", "opts"), - param("const_mongoc_read_prefs_ptr", "read_prefs"), - param("bson_ptr", "reply"), - param("bson_error_ptr", "error")]), + future_function('void', 'mongoc_async_run', [param('mongoc_async_ptr', 'async')]), + future_function( + 'uint32_t', + 'mongoc_bulk_operation_execute', + [param('mongoc_bulk_operation_ptr', 'bulk'), param('bson_ptr', 'reply'), param('bson_error_ptr', 'error')], + ), + future_function( + 'bool', + 'mongoc_database_read_command_with_opts', + [ + param('mongoc_database_ptr', 'database'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_database_read_write_command_with_opts', + [ + param('mongoc_database_ptr', 'database'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_database_write_command_with_opts', + [ + param('mongoc_database_ptr', 'database'), + param('const_bson_ptr', 'command'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_client_command_simple', + [ + param('mongoc_client_ptr', 'client'), + 
param('const_char_ptr', 'db_name'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_client_command_with_opts', + [ + param('mongoc_client_ptr', 'client'), + param('const_char_ptr', 'db_name'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_client_read_command_with_opts', + [ + param('mongoc_client_ptr', 'client'), + param('const_char_ptr', 'db_name'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_client_write_command_with_opts', + [ + param('mongoc_client_ptr', 'client'), + param('const_char_ptr', 'db_name'), + param('const_bson_ptr', 'command'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_client_read_write_command_with_opts', + [ + param('mongoc_client_ptr', 'client'), + param('const_char_ptr', 'db_name'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'mongoc_change_stream_ptr', + 'mongoc_client_watch', + [param('mongoc_client_ptr', 'client'), param('const_bson_ptr', 'pipeline'), param('const_bson_ptr', 'opts')], + ), + future_function( + 'mongoc_cursor_ptr', + 'mongoc_collection_aggregate', + [ + param('mongoc_collection_ptr', 'collection'), + param('mongoc_query_flags_t', 'flags'), + param('const_bson_ptr', 'pipeline'), + 
param('const_bson_ptr', 'options'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_create_indexes_with_opts', + [ + param('mongoc_collection_ptr', 'collection'), + param('mongoc_index_model_t_ptr_const_ptr', 'models'), + param('size_t', 'num_models'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_drop_index_with_opts', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_char_ptr', 'index_name'), + param('const_bson_ptr', 'opts'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_drop_with_opts', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr', 'opts'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_find_and_modify_with_opts', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr', 'query'), + param('const_mongoc_find_and_modify_opts_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_find_and_modify', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr', 'query'), + param('const_bson_ptr', 'sort'), + param('const_bson_ptr', 'update'), + param('const_bson_ptr', 'fields'), + param('bool', '_remove'), + param('bool', 'upsert'), + param('bool', '_new'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'mongoc_cursor_ptr', + 'mongoc_collection_find_indexes_with_opts', + [param('mongoc_collection_ptr', 'collection'), param('const_bson_ptr', 'opts')], + ), + future_function( + 'bool', + 'mongoc_collection_insert_many', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr_ptr', 'documents'), + param('size_t', 'n_documents'), + param('const_bson_ptr', 
'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_insert_one', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr', 'document'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_read_command_with_opts', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_read_write_command_with_opts', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_write_command_with_opts', + [ + param('mongoc_collection_ptr', 'collection'), + param('const_bson_ptr', 'command'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_cluster_run_command_parts', + [ + param('mongoc_cluster_ptr', 'cluster'), + param('mongoc_server_stream_ptr', 'server_stream'), + param('mongoc_cmd_parts_ptr', 'parts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function('void', 'mongoc_cursor_destroy', [param('mongoc_cursor_ptr', 'cursor')]), + future_function( + 'bool', 'mongoc_cursor_next', [param('mongoc_cursor_ptr', 'cursor'), param('const_bson_ptr_ptr', 'doc')] + ), + future_function( + 'char_ptr_ptr', + 'mongoc_client_get_database_names_with_opts', + [param('mongoc_client_ptr', 'client'), param('const_bson_ptr', 'opts'), param('bson_error_ptr', 'error')], + ), 
+ future_function( + 'mongoc_server_description_ptr', + 'mongoc_client_select_server', + [ + param('mongoc_client_ptr', 'client'), + param('bool', 'for_writes'), + param('const_mongoc_read_prefs_ptr', 'prefs'), + param('bson_error_ptr', 'error'), + ], + ), + future_function('void', 'mongoc_client_destroy', [param('mongoc_client_ptr', 'client')]), + future_function('void', 'mongoc_client_pool_destroy', [param('mongoc_client_pool_ptr', 'pool')]), + future_function( + 'bool', + 'mongoc_database_command_simple', + [ + param('mongoc_database_ptr', 'database'), + param('bson_ptr', 'command'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_database_drop_with_opts', + [param('mongoc_database_ptr', 'database'), param('const_bson_ptr', 'opts'), param('bson_error_ptr', 'error')], + ), + future_function( + 'char_ptr_ptr', + 'mongoc_database_get_collection_names_with_opts', + [param('mongoc_database_ptr', 'database'), param('const_bson_ptr', 'opts'), param('bson_error_ptr', 'error')], + ), + future_function( + 'mongoc_change_stream_ptr', + 'mongoc_database_watch', + [ + param('mongoc_database_ptr', 'database'), + param('const_bson_ptr', 'pipeline'), + param('const_bson_ptr', 'opts'), + ], + ), + future_function( + 'ssize_t', + 'mongoc_gridfs_file_readv', + [ + param('mongoc_gridfs_file_ptr', 'file'), + param('mongoc_iovec_ptr', 'iov'), + param('size_t', 'iovcnt'), + param('size_t', 'min_bytes'), + param('uint32_t', 'timeout_msec'), + ], + ), + future_function( + 'bool', 'mongoc_gridfs_file_remove', [param('mongoc_gridfs_file_ptr', 'file'), param('bson_error_ptr', 'error')] + ), + future_function( + 'int', + 'mongoc_gridfs_file_seek', + [param('mongoc_gridfs_file_ptr', 'file'), param('int64_t', 'delta'), param('int', 'whence')], + ), + future_function( + 'ssize_t', + 'mongoc_gridfs_file_writev', + [ + param('mongoc_gridfs_file_ptr', 'file'), + 
param('const_mongoc_iovec_ptr', 'iov'), + param('size_t', 'iovcnt'), + param('uint32_t', 'timeout_msec'), + ], + ), + future_function( + 'mongoc_gridfs_file_ptr', + 'mongoc_gridfs_find_one_with_opts', + [ + param('mongoc_gridfs_ptr', 'gridfs'), + param('const_bson_ptr', 'filter'), + param('const_bson_ptr', 'opts'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'mongoc_server_description_ptr', + 'mongoc_topology_select', + [ + param('mongoc_topology_ptr', 'topology'), + param('mongoc_ss_optype_t', 'optype'), + param('const_mongoc_ss_log_context_ptr', 'log_context'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('bool_ptr', 'must_use_primary'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'mongoc_gridfs_ptr', + 'mongoc_client_get_gridfs', + [ + param('mongoc_client_ptr', 'client'), + param('const_char_ptr', 'db'), + param('const_char_ptr', 'prefix'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'mongoc_change_stream_ptr', + 'mongoc_collection_watch', + [param('mongoc_collection_ptr', 'coll'), param('const_bson_ptr', 'pipeline'), param('const_bson_ptr', 'opts')], + ), + future_function( + 'bool', + 'mongoc_change_stream_next', + [param('mongoc_change_stream_ptr', 'stream'), param('const_bson_ptr_ptr', 'bson')], + ), + future_function('void', 'mongoc_change_stream_destroy', [param('mongoc_change_stream_ptr', 'stream')]), + future_function( + 'bool', + 'mongoc_collection_delete_one', + [ + param('mongoc_collection_ptr', 'coll'), + param('const_bson_ptr', 'selector'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_delete_many', + [ + param('mongoc_collection_ptr', 'coll'), + param('const_bson_ptr', 'selector'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_remove', + 
[ + param('mongoc_collection_ptr', 'coll'), + param('mongoc_remove_flags_t', 'flags'), + param('const_bson_ptr', 'selector'), + param('const_mongoc_write_concern_ptr', 'write_concern'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_update_one', + [ + param('mongoc_collection_ptr', 'coll'), + param('const_bson_ptr', 'selector'), + param('const_bson_ptr', 'update'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_update_many', + [ + param('mongoc_collection_ptr', 'coll'), + param('const_bson_ptr', 'selector'), + param('const_bson_ptr', 'update'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'bool', + 'mongoc_collection_replace_one', + [ + param('mongoc_collection_ptr', 'coll'), + param('const_bson_ptr', 'selector'), + param('const_bson_ptr', 'replacement'), + param('const_bson_ptr', 'opts'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'int64_t', + 'mongoc_collection_count_documents', + [ + param('mongoc_collection_ptr', 'coll'), + param('const_bson_ptr', 'filter'), + param('const_bson_ptr', 'opts'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), + future_function( + 'int64_t', + 'mongoc_collection_estimated_document_count', + [ + param('mongoc_collection_ptr', 'coll'), + param('const_bson_ptr', 'opts'), + param('const_mongoc_read_prefs_ptr', 'read_prefs'), + param('bson_ptr', 'reply'), + param('bson_error_ptr', 'error'), + ], + ), ] for fn in future_functions: @@ -523,7 +598,7 @@ def future_function_name(fn): if fn.name.startswith('mongoc'): # E.g. future_cursor_next(). - return 'future' + fn.name[len('mongoc'):] + return 'future' + fn.name[len('mongoc') :] else: # E.g. 
future_mongoc_client_command_simple(). return 'future_' + fn.name @@ -532,16 +607,18 @@ def future_function_name(fn): env = Environment(loader=FileSystemLoader(template_dir)) env.filters['future_function_name'] = future_function_name -files = ["future.h", - "future.c", - "future-value.h", - "future-value.c", - "future-functions.h", - "future-functions.c"] +files = [ + 'future.h', + 'future.c', + 'future-value.h', + 'future-value.c', + 'future-functions.h', + 'future-functions.c', +] for file_name in files: print(file_name) with open(joinpath(mock_server_dir, file_name), 'w+') as f: - t = env.get_template(file_name + ".template") + t = env.get_template(file_name + '.template') f.write(t.render(globals())) f.write('\n') diff --git a/build/generate-opts.py b/build/generate-opts.py index 71736433aef..9f9b24c9986 100644 --- a/build/generate-opts.py +++ b/build/generate-opts.py @@ -25,9 +25,10 @@ """ # yapf: disable -from collections import OrderedDict -from os.path import basename, dirname, join as joinpath, normpath import re +from collections import OrderedDict +from os.path import basename, dirname, normpath +from os.path import join as joinpath from jinja2 import Environment, FileSystemLoader # Please "pip install jinja2". diff --git a/build/mongodl.py b/build/mongodl.py index b904ea0eb29..c5ac978fa47 100644 --- a/build/mongodl.py +++ b/build/mongodl.py @@ -3,6 +3,7 @@ Use '--help' for more information. """ + import argparse import enum import hashlib @@ -39,14 +40,14 @@ DISTRO_VERSION_MAP = { 'elementary': { - '6': '20.04' + '6': '20.04', }, 'fedora': { '32': '8', '33': '8', '34': '8', '35': '8', - '36': '8' + '36': '8', }, 'linuxmint': { '19': '18.04', @@ -57,7 +58,7 @@ '20.1': '20.04', '20.2': '20.04', '20.3': '20.04', - '21': '22.04' + '21': '22.04', }, } @@ -106,8 +107,7 @@ def infer_target(): for c in cands: if c.is_file(): return _infer_target_os_rel(c) - raise RuntimeError("We don't know how to find the default '--target'" - " option for this system. 
Please contribute!") + raise RuntimeError("We don't know how to find the default '--target' option for this system. Please contribute!") def _infer_target_os_rel(os_rel_path: Path): @@ -115,8 +115,7 @@ def _infer_target_os_rel(os_rel_path: Path): content = f.read() id_re = re.compile(r'\bID=("?)(.*)\1') mat = id_re.search(content) - assert mat, 'Unable to detect ID from [{}] content:\n{}'.format( - os_rel_path, content) + assert mat, 'Unable to detect ID from [{}] content:\n{}'.format(os_rel_path, content) os_id = mat.group(2) if os_id == 'arch': # There are no Archlinux-specific MongoDB downloads, so we'll just use @@ -125,8 +124,7 @@ def _infer_target_os_rel(os_rel_path: Path): return 'rhel80' ver_id_re = re.compile(r'VERSION_ID=("?)(.*)\1') mat = ver_id_re.search(content) - assert mat, 'Unable to detect VERSION_ID from [{}] content:\n{}'.format( - os_rel_path, content) + assert mat, 'Unable to detect VERSION_ID from [{}] content:\n{}'.format(os_rel_path, content) ver_id = mat.group(2) mapped_id = DISTRO_ID_MAP.get(os_id) if mapped_id: @@ -134,21 +132,23 @@ def _infer_target_os_rel(os_rel_path: Path): ver_mapper = DISTRO_VERSION_MAP.get(os_id) if ver_mapper: mapped_version = ver_mapper[ver_id] - print('Mapping version "{}" to "{}"'.format( - ver_id, mapped_version)) + print('Mapping version "{}" to "{}"'.format(ver_id, mapped_version)) ver_id = mapped_version os_id = mapped_id os_id = os_id.lower() if os_id not in DISTRO_ID_TO_TARGET: - raise RuntimeError("We don't know how to map '{}' to a distribution " - "download target. Please contribute!".format(os_id)) + raise RuntimeError( + "We don't know how to map '{}' to a distribution download target. Please contribute!".format(os_id) + ) ver_table = DISTRO_ID_TO_TARGET[os_id] for pattern, target in ver_table.items(): if fnmatch(ver_id, pattern): return target raise RuntimeError( - "We don't know how to map '{}' version '{}' to a distribution " - "download target. 
Please contribute!".format(os_id, ver_id)) + "We don't know how to map '{}' version '{}' to a distribution download target. Please contribute!".format( + os_id, ver_id + ) + ) def caches_root(): @@ -180,15 +180,15 @@ def _import_json_data(db, json_file): db.execute('DROP TABLE IF EXISTS components') db.execute('DROP TABLE IF EXISTS downloads') db.execute('DROP TABLE IF EXISTS versions') - db.execute(r''' + db.execute(r""" CREATE TABLE versions ( version_id INTEGER PRIMARY KEY, date TEXT NOT NULL, version TEXT NOT NULL, githash TEXT NOT NULL ) - ''') - db.execute(r''' + """) + db.execute(r""" CREATE TABLE downloads ( download_id INTEGER PRIMARY KEY, version_id INTEGER NOT NULL REFERENCES versions, @@ -198,8 +198,8 @@ def _import_json_data(db, json_file): ar_url TEST NOT NULL, data TEXT NOT NULL ) - ''') - db.execute(r''' + """) + db.execute(r""" CREATE TABLE components ( component_id INTEGER PRIMARY KEY, key TEXT NOT NULL, @@ -207,7 +207,7 @@ def _import_json_data(db, json_file): data TEXT NOT NULL, UNIQUE(key, download_id) ) - ''') + """) with json_file.open('r') as f: data = json.load(f) for ver in data['versions']: @@ -215,10 +215,10 @@ def _import_json_data(db, json_file): githash = ver['githash'] date = ver['date'] db.execute( - r''' + r""" INSERT INTO versions (date, version, githash) VALUES (?, ?, ?) - ''', + """, (date, version, githash), ) version_id = db.lastrowid @@ -228,10 +228,10 @@ def _import_json_data(db, json_file): edition = dl['edition'] ar_url = dl['archive']['url'] db.execute( - r''' + r""" INSERT INTO downloads (version_id, target, arch, edition, ar_url, data) VALUES (?, ?, ?, ?, ?, ?) - ''', + """, (version_id, target, arch, edition, ar_url, json.dumps(dl)), ) dl_id = db.lastrowid @@ -239,10 +239,10 @@ def _import_json_data(db, json_file): if 'url' not in data: continue db.execute( - r''' + r""" INSERT INTO components (key, download_id, data) VALUES (?, ?, ?) 
- ''', + """, (key, dl_id, json.dumps(data)), ) @@ -269,27 +269,26 @@ def get_dl_db(): caches = cache_dir() _mkdir(caches) db = sqlite3.connect(str(caches / 'downloads.db'), isolation_level=None) - db.executescript(r''' + db.executescript(r""" CREATE TABLE IF NOT EXISTS meta ( etag TEXT, last_modified TEXT ) - ''') - db.executescript(r''' + """) + db.executescript(r""" CREATE TABLE IF NOT EXISTS past_downloads ( url TEXT NOT NULL UNIQUE, etag TEXT, last_modified TEXT ) - ''') - changed, full_json = _download_file( - db, 'https://downloads.mongodb.org/full.json') + """) + changed, full_json = _download_file(db, 'https://downloads.mongodb.org/full.json') if not changed: return db with db: print('Refreshing downloads manifest ...') cur = db.cursor() - cur.execute("begin") + cur.execute('begin') _import_json_data(cur, full_json) return db @@ -297,7 +296,7 @@ def get_dl_db(): def _print_list(db, version, target, arch, edition, component): if version or target or arch or edition or component: matching = db.execute( - r''' + r""" SELECT version, target, arch, edition, key, components.data FROM components, downloads USING(download_id), @@ -307,27 +306,24 @@ def _print_list(db, version, target, arch, edition, component): AND (:arch IS NULL OR arch=:arch) AND (:edition IS NULL OR edition=:edition) AND (:version IS NULL OR version=:version) - ''', - dict(version=version, - target=target, - arch=arch, - edition=edition, - component=component), + """, + dict(version=version, target=target, arch=arch, edition=edition, component=component), ) for version, target, arch, edition, comp_key, comp_data in matching: - print('Download: {}\n\n' - ' Version: {}\n\n' - ' Target: {}\n\n' - ' Arch: {}\n\n' - ' Edition: {}\n\n' - ' Info: {}\n\n'.format(comp_key, version, target, arch, - edition, comp_data)) + print( + 'Download: {}\n\n' + ' Version: {}\n\n' + ' Target: {}\n\n' + ' Arch: {}\n\n' + ' Edition: {}\n\n' + ' Info: {}\n\n'.format(comp_key, version, target, arch, edition, comp_data) 
+ ) print('(Omit filter arguments for a list of available filters)') return arches, targets, editions, versions, components = next( iter( - db.execute(r''' + db.execute(r""" VALUES( (select group_concat(arch, ', ') from (select distinct arch from downloads)), (select group_concat(target, ', ') from (select distinct target from downloads)), @@ -335,27 +331,16 @@ def _print_list(db, version, target, arch, edition, component): (select group_concat(version, ', ') from (select distinct version from versions)), (select group_concat(key, ', ') from (select distinct key from components)) ) - '''))) - versions = '\n'.join( - textwrap.wrap(versions, - width=78, - initial_indent=' ', - subsequent_indent=' ')) - targets = '\n'.join( - textwrap.wrap(targets, - width=78, - initial_indent=' ', - subsequent_indent=' ')) - print('Architectures:\n' - ' {}\n' - 'Targets:\n' - '{}\n' - 'Editions:\n' - ' {}\n' - 'Versions:\n' - '{}\n' - 'Components:\n' - ' {}\n'.format(arches, targets, editions, versions, components)) + """) + ) + ) + versions = '\n'.join(textwrap.wrap(versions, width=78, initial_indent=' ', subsequent_indent=' ')) + targets = '\n'.join(textwrap.wrap(targets, width=78, initial_indent=' ', subsequent_indent=' ')) + print( + 'Architectures:\n {}\nTargets:\n{}\nEditions:\n {}\nVersions:\n{}\nComponents:\n {}\n'.format( + arches, targets, editions, versions, components + ) + ) def infer_arch(): @@ -371,10 +356,7 @@ def infer_arch(): def _download_file(db, url): caches = cache_dir() - info = list( - db.execute( - 'SELECT etag, last_modified FROM past_downloads WHERE url=?', - [url])) + info = list(db.execute('SELECT etag, last_modified FROM past_downloads WHERE url=?', [url])) etag = None modtime = None if info: @@ -385,7 +367,7 @@ def _download_file(db, url): if modtime: headers['If-Modified-Since'] = modtime req = urllib.request.Request(url, headers=headers) - digest = hashlib.md5(url.encode("utf-8")).hexdigest()[:4] + digest = 
hashlib.md5(url.encode('utf-8')).hexdigest()[:4] dest = caches / 'files' / digest / PurePosixPath(url).name try: resp = urllib.request.urlopen(req) @@ -396,7 +378,7 @@ def _download_file(db, url): else: print('Downloading [{}] ...'.format(url)) _mkdir(dest.parent) - got_etag = resp.getheader("ETag") + got_etag = resp.getheader('ETag') got_modtime = resp.getheader('Last-Modified') with dest.open('wb') as of: buf = resp.read(1024 * 1024 * 4) @@ -405,16 +387,15 @@ def _download_file(db, url): buf = resp.read(1024 * 1024 * 4) db.execute( 'INSERT OR REPLACE INTO past_downloads (url, etag, last_modified) VALUES (?, ?, ?)', - (url, got_etag, got_modtime)) + (url, got_etag, got_modtime), + ) return DLRes(True, dest) -def _dl_component(db, out_dir, version, target, arch, edition, component, - pattern, strip_components, test): - print('Download {} v{}-{} for {}-{}'.format(component, version, edition, - target, arch)) +def _dl_component(db, out_dir, version, target, arch, edition, component, pattern, strip_components, test): + print('Download {} v{}-{} for {}-{}'.format(component, version, edition, target, arch)) matching = db.execute( - r''' + r""" SELECT components.data FROM components, @@ -426,26 +407,17 @@ def _dl_component(db, out_dir, version, target, arch, edition, component, AND edition=:edition AND version=:version AND key=:component - ''', - dict(version=version, - target=target, - arch=arch, - edition=edition, - component=component), + """, + dict(version=version, target=target, arch=arch, edition=edition, component=component), ) found = list(matching) if not found: raise ValueError( - 'No download for "{}" was found for ' - 'the requested version+target+architecture+edition'.format( - component)) + 'No download for "{}" was found for the requested version+target+architecture+edition'.format(component) + ) data = json.loads(found[0][0]) cached = _download_file(db, data['url']).path - return _expand_archive(cached, - out_dir, - pattern, - strip_components, - 
test=test) + return _expand_archive(cached, out_dir, pattern, strip_components, test=test) def pathjoin(items): @@ -496,43 +468,41 @@ class ExpandResult(enum.Enum): def _expand_archive(ar, dest, pattern, strip_components, test): - ''' + """ Expand the archive members from 'ar' into 'dest'. If 'pattern' is not-None, only extracts members that match the pattern. - ''' + """ print('Extract from: [{}]'.format(ar.name)) print(' into: [{}]'.format(dest)) if ar.suffix == '.zip': - n_extracted = _expand_zip(ar, - dest, - pattern, - strip_components, - test=test) + n_extracted = _expand_zip(ar, dest, pattern, strip_components, test=test) elif ar.suffix == '.tgz': - n_extracted = _expand_tgz(ar, - dest, - pattern, - strip_components, - test=test) + n_extracted = _expand_tgz(ar, dest, pattern, strip_components, test=test) else: raise RuntimeError('Unknown archive file extension: ' + ar.suffix) verb = 'would be' if test else 'were' if n_extracted == 0: if pattern and strip_components: - print('NOTE: No files {verb} extracted. Likely all files {verb} ' - 'excluded by "--only={p}" and/or "--strip-components={s}"'. - format(p=pattern, s=strip_components, verb=verb)) + print( + 'NOTE: No files {verb} extracted. Likely all files {verb} ' + 'excluded by "--only={p}" and/or "--strip-components={s}"'.format( + p=pattern, s=strip_components, verb=verb + ) + ) elif pattern: - print('NOTE: No files {verb} extracted. Likely all files {verb} ' - 'excluded by the "--only={p}" filter'.format(p=pattern, - verb=verb)) + print( + 'NOTE: No files {verb} extracted. Likely all files {verb} excluded by the "--only={p}" filter'.format( + p=pattern, verb=verb + ) + ) elif strip_components: - print('NOTE: No files {verb} extracted. Likely all files {verb} ' - 'excluded by "--strip-components={s}"'.format( - s=strip_components, verb=verb)) + print( + 'NOTE: No files {verb} extracted. 
Likely all files {verb} excluded by "--strip-components={s}"'.format( + s=strip_components, verb=verb + ) + ) else: - print('NOTE: No files {verb} extracted. Empty archive?'.format( - verb=verb)) + print('NOTE: No files {verb} extracted. Empty archive?'.format(verb=verb)) return ExpandResult.Empty elif n_extracted == 1: print('One file {v} extracted'.format(v='would be' if test else 'was')) @@ -543,7 +513,7 @@ def _expand_archive(ar, dest, pattern, strip_components, test): def _expand_tgz(ar, dest, pattern, strip_components, test): - 'Expand a tar.gz archive' + "Expand a tar.gz archive" n_extracted = 0 with tarfile.open(str(ar), 'r:*') as tf: for mem in tf.getmembers(): @@ -561,7 +531,7 @@ def _expand_tgz(ar, dest, pattern, strip_components, test): def _expand_zip(ar, dest, pattern, strip_components, test): - 'Expand a .zip archive.' + "Expand a .zip archive." n_extracted = 0 with zipfile.ZipFile(ar, 'r') as zf: for item in zf.infolist(): @@ -578,8 +548,7 @@ def _expand_zip(ar, dest, pattern, strip_components, test): return n_extracted -def _maybe_extract_member(out, relpath, pattern, strip, is_dir, opener, - modebits, test): +def _maybe_extract_member(out, relpath, pattern, strip, is_dir, opener, modebits, test): """ Try to extract an archive member according to the given arguments. @@ -615,48 +584,42 @@ def _maybe_extract_member(out, relpath, pattern, strip, is_dir, opener, def main(): parser = argparse.ArgumentParser(description=__doc__) grp = parser.add_argument_group('List arguments') - grp.add_argument('--list', - action='store_true', - help='List available components, targets, editions, and ' - 'architectures. Download arguments will act as filters.') + grp.add_argument( + '--list', + action='store_true', + help='List available components, targets, editions, and architectures. Download arguments will act as filters.', + ) dl_grp = parser.add_argument_group( 'Download arguments', description='Select what to download and extract. 
' 'Non-required arguments will be inferred ' - 'based on the host system.') - dl_grp.add_argument('--target', - '-T', - help='The target platform for which to download. ' - 'Use "--list" to list available targets.') - dl_grp.add_argument('--arch', - '-A', - help='The architecture for which to download') + 'based on the host system.', + ) + dl_grp.add_argument( + '--target', '-T', help='The target platform for which to download. Use "--list" to list available targets.' + ) + dl_grp.add_argument('--arch', '-A', help='The architecture for which to download') dl_grp.add_argument( '--edition', '-E', help='The edition of the product to download (Default is "enterprise"). ' - 'Use "--list" to list available editions.') + 'Use "--list" to list available editions.', + ) + dl_grp.add_argument('--out', '-o', help='The directory in which to download components. (Required)', type=Path) + dl_grp.add_argument( + '--version', '-V', help='The product version to download (Required). Use "--list" to list available versions.' + ) dl_grp.add_argument( - '--out', - '-o', - help='The directory in which to download components. (Required)', - type=Path) - dl_grp.add_argument('--version', - '-V', - help='The product version to download (Required). ' - 'Use "--list" to list available versions.') - dl_grp.add_argument('--component', - '-C', - help='The component to download (Required). ' - 'Use "--list" to list available components.') + '--component', '-C', help='The component to download (Required). Use "--list" to list available components.' + ) dl_grp.add_argument( '--only', - help= - 'Restrict extraction to items that match the given globbing expression. ' + help='Restrict extraction to items that match the given globbing expression. ' 'The full archive member path is matched, so a pattern like "*.exe" ' 'will only match "*.exe" at the top level of the archive. 
To match ' 'recursively, use the "**" pattern to match any number of ' - 'intermediate directories.') + 'intermediate directories.', + ) dl_grp.add_argument( '--strip-path-components', '-p', @@ -664,55 +627,56 @@ def main(): metavar='N', default=0, type=int, - help= - 'Strip the given number of path components from archive members before ' + help='Strip the given number of path components from archive members before ' 'extracting into the destination. The relative path of the archive ' 'member will be used to form the destination path. For example, a ' 'member named [bin/mongod.exe] will be extracted to [/bin/mongod.exe]. ' 'Using --strip-components=1 will remove the first path component, extracting ' 'such an item to [/mongod.exe]. If the path has fewer than N components, ' - 'that archive member will be ignored.') + 'that archive member will be ignored.', + ) dl_grp.add_argument( '--test', action='store_true', help='Do not extract or place any files/directories. ' - 'Only print what will be extracted without placing any files.') - dl_grp.add_argument('--empty-is-error', - action='store_true', - help='If all files are excluded by other filters, ' - 'treat that situation as an error and exit non-zero.') + 'Only print what will be extracted without placing any files.', + ) + dl_grp.add_argument( + '--empty-is-error', + action='store_true', + help='If all files are excluded by other filters, treat that situation as an error and exit non-zero.', + ) args = parser.parse_args() db = get_dl_db() if args.list: - _print_list(db, args.version, args.target, args.arch, args.edition, - args.component) + _print_list(db, args.version, args.target, args.arch, args.edition, args.component) return if args.version is None: raise argparse.ArgumentError(None, 'A "--version" is required') if args.component is None: - raise argparse.ArgumentError( - None, 'A "--component" name should be provided') + raise argparse.ArgumentError(None, 'A "--component" name should be provided') if 
args.out is None: - raise argparse.ArgumentError(None, - 'A "--out" directory should be provided') + raise argparse.ArgumentError(None, 'A "--out" directory should be provided') target = args.target or infer_target() arch = args.arch or infer_arch() edition = args.edition or 'enterprise' out = args.out or Path.cwd() out = out.absolute() - result = _dl_component(db, - out, - version=args.version, - target=target, - arch=arch, - edition=edition, - component=args.component, - pattern=args.only, - strip_components=args.strip_components, - test=args.test) + result = _dl_component( + db, + out, + version=args.version, + target=target, + arch=arch, + edition=edition, + component=args.component, + pattern=args.only, + strip_components=args.strip_components, + test=args.test, + ) if result is ExpandResult.Empty: return 1 return 0 diff --git a/build/proc-ctl.py b/build/proc-ctl.py index 9446bf2892f..373a93d39ac 100644 --- a/build/proc-ctl.py +++ b/build/proc-ctl.py @@ -16,48 +16,30 @@ from typing import TYPE_CHECKING, NoReturn, Sequence, Union, cast if TYPE_CHECKING: - from typing import (Literal, NamedTuple, TypedDict) + from typing import Literal, NamedTuple, TypedDict INTERUPT_SIGNAL = signal.SIGINT if os.name != 'nt' else signal.CTRL_C_SIGNAL def create_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser('proc-ctl') - grp = parser.add_subparsers(title='Commands', - dest='command', - metavar='') + grp = parser.add_subparsers(title='Commands', dest='command', metavar='') start = grp.add_parser('start', help='Start a new subprocess') - start.add_argument('--ctl-dir', - help='The control directory for the subprocess', - required=True, - type=Path) - start.add_argument('--cwd', - help='The new subdirectory of the spawned process', - type=Path) - start.add_argument( - '--spawn-wait', - help='Number of seconds to wait for child to be running', - type=float, - default=3) - start.add_argument('child_command', - nargs='+', - help='The command to execute', - 
metavar=' [args...]') + start.add_argument('--ctl-dir', help='The control directory for the subprocess', required=True, type=Path) + start.add_argument('--cwd', help='The new subdirectory of the spawned process', type=Path) + start.add_argument('--spawn-wait', help='Number of seconds to wait for child to be running', type=float, default=3) + start.add_argument('child_command', nargs='+', help='The command to execute', metavar=' [args...]') stop = grp.add_parser('stop', help='Stop a running subprocess') - stop.add_argument('--ctl-dir', - help='The control directory for the subprocess', - required=True, - type=Path) - stop.add_argument('--stop-wait', - help='Number of seconds to wait for stopping', - type=float, - default=5) - stop.add_argument('--if-not-running', - help='Action to take if the child is not running', - choices=['fail', 'ignore'], - default='fail') + stop.add_argument('--ctl-dir', help='The control directory for the subprocess', required=True, type=Path) + stop.add_argument('--stop-wait', help='Number of seconds to wait for stopping', type=float, default=5) + stop.add_argument( + '--if-not-running', + help='Action to take if the child is not running', + choices=['fail', 'ignore'], + default='fail', + ) ll_run = grp.add_parser('__run') ll_run.add_argument('--ctl-dir', type=Path, required=True) @@ -67,33 +49,39 @@ def create_parser() -> argparse.ArgumentParser: if TYPE_CHECKING: - StartCommandArgs = NamedTuple('StartCommandArgs', [ - ('command', Literal['start']), - ('ctl_dir', Path), - ('cwd', Path), - ('child_command', Sequence[str]), - ('spawn_wait', float), - ]) - - StopCommandArgs = NamedTuple('StopCommandArgs', [ - ('command', Literal['stop']), - ('ctl_dir', Path), - ('stop_wait', float), - ('if_not_running', Literal['fail', 'ignore']), - ]) - - _RunCommandArgs = NamedTuple('_RunCommandArgs', [ - ('command', Literal['__run']), - ('child_command', Sequence[str]), - ('ctl_dir', Path), - ]) + StartCommandArgs = NamedTuple( + 'StartCommandArgs', + [ + 
('command', Literal['start']), + ('ctl_dir', Path), + ('cwd', Path), + ('child_command', Sequence[str]), + ('spawn_wait', float), + ], + ) + + StopCommandArgs = NamedTuple( + 'StopCommandArgs', + [ + ('command', Literal['stop']), + ('ctl_dir', Path), + ('stop_wait', float), + ('if_not_running', Literal['fail', 'ignore']), + ], + ) + + _RunCommandArgs = NamedTuple( + '_RunCommandArgs', + [ + ('command', Literal['__run']), + ('child_command', Sequence[str]), + ('ctl_dir', Path), + ], + ) CommandArgs = Union[StartCommandArgs, StopCommandArgs, _RunCommandArgs] - _ResultType = TypedDict('_ResultType', { - 'exit': 'str | int | None', - 'error': 'str | None' - }) + _ResultType = TypedDict('_ResultType', {'exit': 'str | int | None', 'error': 'str | None'}) def parse_argv(argv: 'Sequence[str]') -> 'CommandArgs': @@ -103,7 +91,6 @@ def parse_argv(argv: 'Sequence[str]') -> 'CommandArgs': class _ChildControl: - def __init__(self, ctl_dir: Path) -> None: self._ctl_dir = ctl_dir @@ -128,10 +115,7 @@ def get_pid(self) -> 'int | None': return int(txt) def set_exit(self, exit: 'str | int | None', error: 'str | None') -> None: - write_text(self.result_file, json.dumps({ - 'exit': exit, - 'error': error - })) + write_text(self.result_file, json.dumps({'exit': exit, 'error': error})) remove_file(self.pid_file) def get_result(self) -> 'None | _ResultType': @@ -159,8 +143,7 @@ def _start(args: 'StartCommandArgs') -> int: args.ctl_dir.mkdir(exist_ok=True, parents=True) child = _ChildControl(args.ctl_dir) if child.get_pid() is not None: - raise RuntimeError('Child process is already running [PID {}]'.format( - child.get_pid())) + raise RuntimeError('Child process is already running [PID {}]'.format(child.get_pid())) child.clear_result() # Spawn the child controller subprocess.Popen( @@ -168,7 +151,8 @@ def _start(args: 'StartCommandArgs') -> int: cwd=args.cwd, stderr=subprocess.STDOUT, stdout=args.ctl_dir.joinpath('runner-output.txt').open('wb'), - stdin=subprocess.DEVNULL) + 
stdin=subprocess.DEVNULL, + ) expire = datetime.now() + timedelta(seconds=args.spawn_wait) # Wait for the PID to appear while child.get_pid() is None and child.get_result() is None: @@ -182,8 +166,7 @@ def _start(args: 'StartCommandArgs') -> int: raise RuntimeError('Failed to spawn child runner?') if result['error']: print(result['error'], file=sys.stderr) - raise RuntimeError('Child exited immediately [Exited {}]'.format( - result['exit'])) + raise RuntimeError('Child exited immediately [Exited {}]'.format(result['exit'])) # Wait to see that it is still running after --spawn-wait seconds while child.get_result() is None: if expire < datetime.now(): @@ -194,8 +177,7 @@ def _start(args: 'StartCommandArgs') -> int: if result is not None: if result['error']: print(result['error'], file=sys.stderr) - raise RuntimeError('Child exited prematurely [Exited {}]'.format( - result['exit'])) + raise RuntimeError('Child exited prematurely [Exited {}]'.format(result['exit'])) return 0 @@ -216,8 +198,7 @@ def _stop(args: 'StopCommandArgs') -> int: time.sleep(0.1) result = child.get_result() if result is None: - raise RuntimeError( - 'Child process did not exit within the grace period') + raise RuntimeError('Child process did not exit within the grace period') return 0 @@ -228,7 +209,8 @@ def __run(args: '_RunCommandArgs') -> int: args.child_command, stdout=args.ctl_dir.joinpath('child-output.txt').open('wb'), stderr=subprocess.STDOUT, - stdin=subprocess.DEVNULL) + stdin=subprocess.DEVNULL, + ) except: this.set_exit('spawn-failed', traceback.format_exc()) raise @@ -272,8 +254,7 @@ def remove_file(fpath: Path): then delete that file. This ensures the file is "out of the way", even if it takes some time to delete. 
""" - delname = fpath.with_name(fpath.name + '.delete-' + - str(random.randint(0, 999999))) + delname = fpath.with_name(fpath.name + '.delete-' + str(random.randint(0, 999999))) try: fpath.rename(delname) except FileNotFoundError: diff --git a/build/sphinx/homepage-config/conf.py b/build/sphinx/homepage-config/conf.py index f1fae6c5e7b..9e4fe048b8d 100644 --- a/build/sphinx/homepage-config/conf.py +++ b/build/sphinx/homepage-config/conf.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- -from docutils import nodes import os import sys +from docutils import nodes + # Import common docs config. this_path = os.path.dirname(__file__) sys.path.append(os.path.normpath(os.path.join(this_path, '../'))) @@ -18,9 +19,9 @@ master_doc = 'index' # General information about the project. -project = u'mongoc.org' -copyright = u'2009-present, MongoDB, Inc.' -author = u'MongoDB, Inc' +project = 'mongoc.org' +copyright = '2009-present, MongoDB, Inc.' +author = 'MongoDB, Inc' exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. @@ -32,25 +33,24 @@ # Support :download-link:`bson` or :download-link:`mongoc`. 
def download_link(typ, rawtext, text, lineno, inliner, options={}, content=[]): - if text == "mongoc": - lib = "mongo-c-driver" + if text == 'mongoc': + lib = 'mongo-c-driver' else: - raise ValueError( - "download link must be mongoc, not \"%s\"" % text) + raise ValueError('download link must be mongoc, not "%s"' % text) - title = "%s-%s" % (lib, version) - url = ("https://github.com/mongodb/mongo-c-driver/releases/tag/%(version)s") % { - "version": version - } + title = '%s-%s' % (lib, version) + url = ('https://github.com/mongodb/mongo-c-driver/releases/tag/%(version)s') % {'version': version} pnode = nodes.reference(title, title, internal=False, refuri=url) return [pnode], [] + def setup(app): mongoc_common_setup(app) app.add_role('download-link', download_link) + # -- Options for HTML output ---------------------------------------------- html_theme = 'furo' @@ -65,9 +65,7 @@ def setup(app): """ -html_sidebars = { - '**': [] -} +html_sidebars = {'**': []} # Note: http://www.sphinx-doc.org/en/1.5.1/config.html#confval-html_copy_source # This will degrade the Javascript quicksearch if we ever use it. 
diff --git a/build/sphinx/mongoc/__init__.py b/build/sphinx/mongoc/__init__.py index ea5c2861dfd..bf840f32c8c 100644 --- a/build/sphinx/mongoc/__init__.py +++ b/build/sphinx/mongoc/__init__.py @@ -1,8 +1,7 @@ -from docutils.nodes import literal, Text +from docutils.nodes import Text, literal from docutils.parsers.rst import roles - -from sphinx.roles import XRefRole from sphinx import version_info as sphinx_version_info +from sphinx.roles import XRefRole class SymbolRole(XRefRole): @@ -42,8 +41,7 @@ def __call__(self, *args, **kwargs): def setup(app): - roles.register_local_role( - 'symbol', SymbolRole(warn_dangling=True, innernodeclass=literal)) + roles.register_local_role('symbol', SymbolRole(warn_dangling=True, innernodeclass=literal)) return { 'version': '1.0', diff --git a/build/sphinx/mongoc_common.py b/build/sphinx/mongoc_common.py index f46a805d220..96d50f3fb25 100644 --- a/build/sphinx/mongoc_common.py +++ b/build/sphinx/mongoc_common.py @@ -1,11 +1,10 @@ import os import re from pathlib import Path -from typing import Any, Iterable, Sequence, Union, List, Tuple, Dict +from typing import Any, Dict, Iterable, List, Sequence, Tuple, Union from docutils import nodes from docutils.nodes import Node, document - from sphinx.application import Sphinx from sphinx.application import logger as sphinx_log @@ -14,12 +13,12 @@ except ImportError: # Try importing from older Sphinx version path. from sphinx.builders.html import DirectoryHTMLBuilder -from sphinx.config import Config from docutils.parsers.rst import Directive +from sphinx.config import Config # Do not require newer sphinx. EPEL packages build man pages with Sphinx 1.7.6. 
Refer: CDRIVER-4767 -needs_sphinx = "1.7" -author = "MongoDB, Inc" +needs_sphinx = '1.7' +author = 'MongoDB, Inc' # -- Options for HTML output ---------------------------------------------- @@ -35,7 +34,7 @@ def _file_man_page_name(fpath: Path) -> Union[str, None]: "Given an rST file input, find the :man_page: frontmatter value, if present" lines = fpath.read_text().splitlines() for line in lines: - mat = re.match(r":man_page:\s+(.+)", line) + mat = re.match(r':man_page:\s+(.+)', line) if not mat: continue return mat[1] @@ -45,11 +44,11 @@ def _collect_man(app: Sphinx): # Note: 'app' is partially-formed, as this is called from the Sphinx.__init__ docdir = Path(app.srcdir) # Find everything: - children = docdir.rglob("*") + children = docdir.rglob('*') # Find only regular files: files = filter(Path.is_file, children) # Find files that have a .rst extension: - rst_files = (f for f in files if f.suffix == ".rst") + rst_files = (f for f in files if f.suffix == '.rst') # Pair each file with its :man_page: frontmatter, if present: with_man_name = ((f, _file_man_page_name(f)) for f in rst_files) # Filter out pages that do not have a :man_page: item:s @@ -62,7 +61,7 @@ def _collect_man(app: Sphinx): docname = str(relative_path.parent / filepath.stem) assert docname, filepath - man_pages.append((docname, man_name, "", [author], 3)) + man_pages.append((docname, man_name, '', [author], 3)) # -- Options for manual page output --------------------------------------- @@ -82,8 +81,8 @@ def add_ga_javascript(app: Sphinx, pagename: str, templatename: str, context: Di return # Add google analytics and NPS survey. 
- context["metatags"] = ( - context.get("metatags", "") + context['metatags'] = ( + context.get('metatags', '') + """ @@ -111,34 +110,34 @@ class VersionList(Directive): has_content = True def run(self) -> Sequence[Node]: - if self.content[0] != "libmongoc" and self.content[0] != "libbson": - print("versionlist must be libmongoc or libbson") + if self.content[0] != 'libmongoc' and self.content[0] != 'libbson': + print('versionlist must be libmongoc or libbson') return [] libname = self.content[0] - env_name = libname.upper() + "_VERSION_LIST" + env_name = libname.upper() + '_VERSION_LIST' if env_name not in os.environ: - print(env_name + " not set, not generating version list") + print(env_name + ' not set, not generating version list') return [] - versions = os.environ[env_name].split(",") + versions = os.environ[env_name].split(',') - header = nodes.paragraph("", "") - p = nodes.paragraph("", "") - uri = "https://www.mongoc.org/%s/%s/index.html" % (libname, versions[0]) - p += nodes.reference("", "Latest Release (%s)" % versions[0], internal=False, refuri=uri) + header = nodes.paragraph('', '') + p = nodes.paragraph('', '') + uri = 'https://www.mongoc.org/%s/%s/index.html' % (libname, versions[0]) + p += nodes.reference('', 'Latest Release (%s)' % versions[0], internal=False, refuri=uri) header += p - p = nodes.paragraph("", "") - uri = "https://s3.amazonaws.com/mciuploads/mongo-c-driver/docs/%s/latest/index.html" % (libname) - p += nodes.reference("", "Current Development (master)", internal=False, refuri=uri) + p = nodes.paragraph('', '') + uri = 'https://s3.amazonaws.com/mciuploads/mongo-c-driver/docs/%s/latest/index.html' % (libname) + p += nodes.reference('', 'Current Development (master)', internal=False, refuri=uri) header += p blist = nodes.bullet_list() for v in versions: item = nodes.list_item() - p = nodes.paragraph("", "") - uri = "https://www.mongoc.org/%s/%s/index.html" % (libname, v) - p += nodes.reference("", v, internal=False, refuri=uri) + p = 
nodes.paragraph('', '') + uri = 'https://www.mongoc.org/%s/%s/index.html' % (libname, v) + p += nodes.reference('', v, internal=False, refuri=uri) item += p blist += item return [header, blist] @@ -146,22 +145,22 @@ def run(self) -> Sequence[Node]: def generate_html_redirs(app: Sphinx, page: str, templatename: str, context: Dict[str, Any], doctree: Any) -> None: builder = app.builder - if not isinstance(builder, DirectoryHTMLBuilder) or "writing-redirect" in context: + if not isinstance(builder, DirectoryHTMLBuilder) or 'writing-redirect' in context: return - if page == "index" or page.endswith(".index"): + if page == 'index' or page.endswith('.index'): return path = app.project.doc2path(page, True) out_index_html = Path(builder.get_outfilename(page)) slug = out_index_html.parent.name - redirect_file = out_index_html.parent.parent / f"{slug}.html" + redirect_file = out_index_html.parent.parent / f'{slug}.html' # HACK: handle_page() is not properly reentrant. Save and restore state for # this page while we generate our redirects page: prev_scripts = builder.script_files[:] prev_css = builder.css_files[:] builder.handle_page( - f"redirect-for-{page}", - {"target": page, "writing-redirect": 1}, - str(Path(__file__).parent.resolve() / "redirect.t.html"), + f'redirect-for-{page}', + {'target': page, 'writing-redirect': 1}, + str(Path(__file__).parent.resolve() / 'redirect.t.html'), # Note: In Sphinx 8.2, this argument changed from `str` to `Path`, but # continues to work with `str`. A future version might need this changed # to pass a `Path`, but we can keep `str` for now. 
@@ -170,13 +169,13 @@ def generate_html_redirs(app: Sphinx, page: str, templatename: str, context: Dic # Restore prior state: builder.script_files[:] = prev_scripts builder.css_files[:] = prev_css - sphinx_log.debug("Wrote redirect: %r -> %r", path, page) + sphinx_log.debug('Wrote redirect: %r -> %r', path, page) def mongoc_common_setup(app: Sphinx): _collect_man(app) - app.connect("html-page-context", generate_html_redirs) - app.connect("html-page-context", add_ga_javascript) + app.connect('html-page-context', generate_html_redirs) + app.connect('html-page-context', add_ga_javascript) # Run sphinx-build -D analytics=1 to enable Google Analytics. - app.add_config_value("analytics", False, "html") - app.add_directive("versionlist", VersionList) + app.add_config_value('analytics', False, 'html') + app.add_directive('versionlist', VersionList) diff --git a/docs/dev/conf.py b/docs/dev/conf.py index f4a593fa064..e09fd16fa8b 100644 --- a/docs/dev/conf.py +++ b/docs/dev/conf.py @@ -6,8 +6,8 @@ # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information -from pathlib import Path import re +from pathlib import Path from typing import Callable from sphinx import addnodes @@ -18,10 +18,10 @@ THIS_DIR = THIS_FILE.parent REPO_ROOT = THIS_DIR.parent.parent -project = "MongoDB C Driver Development" -copyright = "2009-present, MongoDB, Inc." -author = "MongoDB, Inc" -release = (REPO_ROOT / "VERSION_CURRENT").read_text().strip() +project = 'MongoDB C Driver Development' +copyright = '2009-present, MongoDB, Inc.' 
+author = 'MongoDB, Inc' +release = (REPO_ROOT / 'VERSION_CURRENT').read_text().strip() # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration @@ -29,13 +29,13 @@ extensions = [] templates_path = [] exclude_patterns = [] -default_role = "any" +default_role = 'any' # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -html_theme = "nature" -pygments_style = "sphinx" +html_theme = 'nature' +pygments_style = 'sphinx' html_static_path = [] rst_prolog = rf""" @@ -51,56 +51,50 @@ def annotator( Create a parse_node function that adds a parenthesized annotation to an object signature. """ - def parse_node( - env: BuildEnvironment, sig: str, signode: addnodes.desc_signature - ) -> str: + def parse_node(env: BuildEnvironment, sig: str, signode: addnodes.desc_signature) -> str: signode += addnodes.desc_name(sig, sig) signode += addnodes.desc_sig_space() - signode += addnodes.desc_annotation("", f"({annot})") + signode += addnodes.desc_annotation('', f'({annot})') return sig return parse_node -def parse_earthly_artifact( - env: BuildEnvironment, sig: str, signode: addnodes.desc_signature -) -> str: +def parse_earthly_artifact(env: BuildEnvironment, sig: str, signode: addnodes.desc_signature) -> str: """ Parse and render the signature of an '.. 
earthly-artifact::' signature""" - mat = re.match(r"(?P<target>\+.+?)(?P<path>/.*)$", sig) + mat = re.match(r'(?P<target>\+.+?)(?P<path>/.*)$', sig) if not mat: - raise RuntimeError( - f"Invalid earthly-artifact signature: {sig!r} (expected “+<target>/<path>” string)" - ) - signode += addnodes.desc_addname(mat["target"], mat["target"]) - signode += addnodes.desc_name(mat["path"], mat["path"]) + raise RuntimeError(f'Invalid earthly-artifact signature: {sig!r} (expected “+<target>/<path>” string)') + signode += addnodes.desc_addname(mat['target'], mat['target']) + signode += addnodes.desc_name(mat['path'], mat['path']) signode += addnodes.desc_sig_space() - signode += addnodes.desc_annotation("", "(Earthly Artifact)") + signode += addnodes.desc_annotation('', '(Earthly Artifact)') return sig def setup(app: Sphinx): app.add_object_type( # type: ignore - "earthly-target", - "earthly-target", - indextemplate="pair: earthly target; %s", - parse_node=annotator("Earthly target"), + 'earthly-target', + 'earthly-target', + indextemplate='pair: earthly target; %s', + parse_node=annotator('Earthly target'), ) app.add_object_type( # type: ignore - "script", - "script", - indextemplate="pair: shell script; %s", - parse_node=annotator("shell script"), + 'script', + 'script', + indextemplate='pair: shell script; %s', + parse_node=annotator('shell script'), ) app.add_object_type( # type: ignore - "earthly-artifact", - "earthly-artifact", - indextemplate="pair: earthly artifact; %s", + 'earthly-artifact', + 'earthly-artifact', + indextemplate='pair: earthly artifact; %s', parse_node=parse_earthly_artifact, ) app.add_object_type( # type: ignore - "file", - "file", - indextemplate="repository file; %s", - parse_node=annotator("repository file"), + 'file', + 'file', + indextemplate='repository file; %s', + parse_node=annotator('repository file'), ) diff --git a/lldb.pyi b/lldb.pyi index 9df00f1776c..6e3082a95d7 100644 --- a/lldb.pyi +++ b/lldb.pyi @@ -5,7 +5,7 @@ are used in lldb_bson have been transcribed from the LLDB Python API 
documentation. Refer: https://lldb.llvm.org/python_api.html """ -from typing import IO, Any, Sequence, TypeAlias, NoReturn +from typing import IO, Any, NoReturn, Sequence, TypeAlias _Pointer: TypeAlias = int _Size: TypeAlias = int diff --git a/lldb_bson.py b/lldb_bson.py index 4af06ee0fd4..18fa86ff32a 100644 --- a/lldb_bson.py +++ b/lldb_bson.py @@ -107,13 +107,13 @@ def _wrap(*args: Any, **kwargs: Any) -> Any: print(e) raise - return cast("FuncT", _wrap) + return cast('FuncT', _wrap) @print_errors def __lldb_init_module(debugger: SBDebugger, internal_dict: InternalDict): # Inject the global magic document traverser: - internal_dict["bson"] = _BSONWalker() + internal_dict['bson'] = _BSONWalker() # Register types: for cls in _SyntheticMeta.synthetics: # The (regex of) the type that is handled by this class: @@ -127,34 +127,34 @@ def __lldb_init_module(debugger: SBDebugger, internal_dict: InternalDict): quoted = cls.__summary_str__.replace("'", "\\'") cmd = f"type summary add --summary-string '{quoted}' -x '^{ty}$'" debugger.HandleCommand(cmd) - if hasattr(cls, "__summary__"): + if hasattr(cls, '__summary__'): # More complex: Call a Python function that will create the summary cmd = f"type summary add -F lldb_bson.{cls.__name__}.__summary__ -x '^{ty}$'" debugger.HandleCommand(cmd) # Render __bson_byte__ as "bytes with ASCII." 
__bson_byte__ is a # debug-only type generated on-the-fly in LLDB - debugger.HandleCommand("type format add -f Y __bson_byte__") + debugger.HandleCommand('type format add -f Y __bson_byte__') # Arrays of bytes as a sequence of hex values: debugger.HandleCommand(r"type summary add -s '${var[]%x}' -x '__bson_byte__\[[0-9]+\]'") - print("lldb_bson is ready") + print('lldb_bson is ready') _ = __lldb_init_module # Silence "unused function" warnings -FuncT = TypeVar("FuncT", bound=Callable[..., Any]) -"Type of functions" -T = TypeVar("T") -"Unbounded invariant type parameter" +FuncT = TypeVar('FuncT', bound=Callable[..., Any]) +'Type of functions' +T = TypeVar('T') +'Unbounded invariant type parameter' InternalDict = Dict[str, Any] -"Type of internal dictionaries, provided by LLDB" +'Type of internal dictionaries, provided by LLDB' ValueFactory = Callable[[], SBValue] ChildItem = Union[ - Tuple[str, "str | int"], ValueFactory, Tuple[str, "str | int", "lldb.ValueFormatType|None", "SBType|None"] + Tuple[str, 'str | int'], ValueFactory, Tuple[str, 'str | int', 'lldb.ValueFormatType|None', 'SBType|None'] ] @@ -165,19 +165,19 @@ class _SyntheticMeta(type): """ synthetics: list[Type[SyntheticDisplayBase[Any]]] = [] - "The display type classes that have been defined" + 'The display type classes that have been defined' @override def __new__( cls: Type[_SyntheticMeta], name: str, bases: tuple[type, ...], namespace: dict[str, Any] ) -> Type[SyntheticDisplayBase[Any]]: new_class: Type[SyntheticDisplayBase[Any]] = type.__new__(cast(type, cls), name, bases, namespace) - if namespace.get("__abstract__"): + if namespace.get('__abstract__'): return new_class # Check for the required __typename__ and __parse__ - if not hasattr(new_class, "__typename__"): + if not hasattr(new_class, '__typename__'): raise TypeError(f'Type "{new_class}" is missing a "__typename__" attribute') - if not hasattr(new_class, "__parse__"): + if not hasattr(new_class, '__parse__'): raise TypeError(f'Type 
"{new_class}" has no "__parse__" method') # Remember this new class: cls.synthetics.append(new_class) @@ -186,12 +186,12 @@ def __new__( class SyntheticDisplayBase(Generic[T], SBSyntheticValueProvider, metaclass=_SyntheticMeta): __abstract__: ClassVar[bool] = True - "If true, disables metaclass checks" + 'If true, disables metaclass checks' __summary_str__: ClassVar[str | None] = None "Set to an LLDB '--summary-string' formatting string for rendering the inline value summary" __enable_synthetic__: ClassVar[bool] = True - "If False, do not generate synthetic children (used for primitive values)" + 'If False, do not generate synthetic children (used for primitive values)' if TYPE_CHECKING: __typename__: ClassVar[str] @@ -223,16 +223,16 @@ def __get_sbtype__(cls, frame: SBFrame, addr: int) -> SBType: Obtain the SBType for this class. Can be overriden in subclasses, and the type may consider the value that lives at the address. """ - return generate_or_get_type(f"struct {cls.__typename__} {{}}", frame) + return generate_or_get_type(f'struct {cls.__typename__} {{}}', frame) @print_errors def __init__(self, val: SBValue, idict: InternalDict | None = None) -> None: self.__sbvalue = val - "The SBValue given for this object" + 'The SBValue given for this object' self.__children: list[ChildItem] = [] - "The synthetic children associated with the value" + 'The synthetic children associated with the value' self.__value: T | None = None - "The decoded value, or ``None`` if it has not yet been decoded" + 'The decoded value, or ``None`` if it has not yet been decoded' @property def sbvalue(self) -> SBValue: @@ -285,7 +285,7 @@ def get_child_at_index(self, pos: int) -> SBValue: """ # LLDB sometimes calls us with a child that we don't have? 
if pos >= len(self.__children): - print(f"NOTE: lldb called get_child_at_index({pos}), but we only have {len(self.__children)} children") + print(f'NOTE: lldb called get_child_at_index({pos}), but we only have {len(self.__children)} children') return SBValue() # Get the child: nth = self.__children[pos] @@ -293,7 +293,7 @@ def get_child_at_index(self, pos: int) -> SBValue: if not isinstance(nth, tuple): # The type is a ValueFactory, which will return a new SBValue val = nth() - assert val.error.success, f"{val.error=}, {nth=}, {pos=}" + assert val.error.success, f'{val.error=}, {nth=}, {pos=}' return val # Otherwise, they yielded a tuple: if len(nth) == 4: @@ -338,7 +338,7 @@ class PrimitiveDisplay(Generic[T], SyntheticDisplayBase[T]): __enable_synthetic__: ClassVar[bool] = False __struct_format__: ClassVar[str] - "The struct format string that will be used to extract the value from memory" + 'The struct format string that will be used to extract the value from memory' @classmethod @override @@ -358,15 +358,15 @@ def __parse__(cls, value: SBValue) -> T: class DoubleDisplay(PrimitiveDisplay[float]): """Displays BSON doubles""" - __typename__ = "__bson_double__" - __struct_format__: ClassVar[str] = " bytes: @override def get_children(self) -> Iterable[ChildItem]: strlen = len(self.value) - yield "size (bytes)", strlen + yield 'size (bytes)', strlen # Create a char[] type to represent the string content: array_t = self.sbvalue.target.GetBasicType(lldb.eBasicTypeChar).GetArrayType(strlen) - yield lambda: self.sbvalue.synthetic_child_from_address("[content]", self.address + 4, array_t) + yield lambda: self.sbvalue.synthetic_child_from_address('[content]', self.address + 4, array_t) try: # Attempt a UTF-8 decode. 
We don't actually show this, we just want to # check if there are encoding errors, which we will display in the output - self.value.decode("utf-8") + self.value.decode('utf-8') except UnicodeDecodeError as e: - yield "decode error", str(e) + yield 'decode error', str(e) class DocumentInfo(NamedTuple): """A decoded document""" elements: Sequence[DocumentElement | DocumentError] - "Existing elements or errors found while parsing the data" + 'Existing elements or errors found while parsing the data' class DocumentElement(NamedTuple): @@ -415,7 +415,7 @@ class DocumentError(NamedTuple): error_offset: int -class DocumentDisplay(SyntheticDisplayBase["DocumentInfo | DocumentError"]): +class DocumentDisplay(SyntheticDisplayBase['DocumentInfo | DocumentError']): """ Main display of BSON document elements. This parses a document/array, and generates the child elements that can be further expanded and inspected. @@ -424,8 +424,8 @@ class DocumentDisplay(SyntheticDisplayBase["DocumentInfo | DocumentError"]): the top-level object and is the one responsible for filling the cache. """ - __typename__ = "__bson_document_[0-9]+__" - __qualifier__: ClassVar[str] = "document" + __typename__ = '__bson_document_[0-9]+__' + __qualifier__: ClassVar[str] = 'document' "The 'qualifier' of this type. Overriden by ArrayDisplay." 
@classmethod @@ -435,12 +435,12 @@ def __summary__(cls, value: SBValue, idict: InternalDict) -> str: prefix = cls.__qualifier__ doc = cls.__parse__(value) if isinstance(doc, DocumentError): - return f"Error parsing {prefix} at byte {doc.error_offset}: {doc.message}" + return f'Error parsing {prefix} at byte {doc.error_offset}: {doc.message}' if len(doc.elements) == 0: - return f"{prefix} (empty)" + return f'{prefix} (empty)' if len(doc.elements) == 1: - return f"{prefix} (1 element)" - return f"{prefix} ({len(doc.elements)} elements)" + return f'{prefix} (1 element)' + return f'{prefix} ({len(doc.elements)} elements)' @classmethod @override @@ -449,10 +449,10 @@ def __get_sbtype__(cls, frame: SBFrame, addr: int) -> SBType: # Read the size prefix: err = SBError() header = frame.thread.process.ReadMemory(addr, 4, err) - assert err.success, f"{err=}, {frame=}, {addr=}" + assert err.success, f'{err=}, {frame=}, {addr=}' size = read_i32le(header) # Generate the type: - typename = f"__bson_{cls.__qualifier__}_{size}__" + typename = f'__bson_{cls.__qualifier__}_{size}__' doc_t = generate_or_get_type( f""" enum __bson_byte__ : unsigned char {{}}; @@ -471,7 +471,7 @@ def __parse__(cls, value: SBValue) -> DocumentInfo | DocumentError: # will be pulled here: buf = memcache.read(value)[1] except LookupError as e: - return DocumentError(f"Failed to read memory: {e}", value.load_addr) + return DocumentError(f'Failed to read memory: {e}', value.load_addr) return cls.parse_bytes(buf) @classmethod @@ -497,12 +497,12 @@ def _parse_elems(cls, buf: bytes) -> Iterable[DocumentElement | DocumentError]: # Yield this one, and then advance to the next element: yield elem elem_size = 1 + len(elem.key) + 1 + elem.value_size - if cls.__qualifier__ == "array": + if cls.__qualifier__ == 'array': # Validate that array keys are increasing integers: expect_key = str(array_idx) if elem.key != expect_key: yield DocumentError( - f"Array element must have incrementing integer keys " + f'Array element 
must have incrementing integer keys ' f'(Expected "{expect_key}", got "{elem.key}")', cur_offset, ) @@ -511,7 +511,7 @@ def _parse_elems(cls, buf: bytes) -> Iterable[DocumentElement | DocumentError]: # Check that we actually consumed the whole buffer: remain = len(buf) - cur_offset if remain > 1: - yield DocumentError(f"Extra {len(buf)} bytes in document data", cur_offset) + yield DocumentError(f'Extra {len(buf)} bytes in document data', cur_offset) @classmethod def _parse_one( @@ -522,24 +522,24 @@ def _parse_one( type_tag = BSONType(buf[0]) except ValueError: # The tag byte is not a valid tag value - return DocumentError(f"Invalid element type tag 0x{buf[0]:x}", elem_offset) + return DocumentError(f'Invalid element type tag 0x{buf[0]:x}', elem_offset) except IndexError: # 'buf' was empty - return DocumentError(f"Unexpected end-of-data", elem_offset) + return DocumentError(f'Unexpected end-of-data', elem_offset) # Stop if this is the end: if type_tag == BSONType.EOD: - return DocumentElement(type_tag, "", 0, 0) + return DocumentElement(type_tag, '', 0, 0) # Find the null terminator on the key: try: key_nulpos = buf.index(0, 1) except ValueError: - return DocumentError(f"Unexpected end-of-data while parsing the element key", elem_offset) + return DocumentError(f'Unexpected end-of-data while parsing the element key', elem_offset) key_bytes = buf[1:key_nulpos] try: - key = key_bytes.decode("utf-8") + key = key_bytes.decode('utf-8') except UnicodeDecodeError as e: - yield DocumentError(f"Element key {key_bytes} is not valid UTF-8 ({e})", elem_offset) - key = key_bytes.decode("utf-8", errors="replace") + yield DocumentError(f'Element key {key_bytes} is not valid UTF-8 ({e})', elem_offset) + key = key_bytes.decode('utf-8', errors='replace') # The offset of the value within the element: inner_offset = key_nulpos + 1 # The buffer that starts at the value: @@ -580,7 +580,7 @@ def _parse_one( # Size is a length prefix, plus four, plus one for the subtype value_size = 
read_i32le(value_bytes) + 4 + 1 else: - assert False, f"Unhandled value tag? {type_tag=} {buf=} {key=}" + assert False, f'Unhandled value tag? {type_tag=} {buf=} {key=}' # The absolute offset of the element within the parent document: value_offset = elem_offset + inner_offset return DocumentElement(type_tag, key, value_offset, value_size) @@ -590,12 +590,12 @@ def get_children(self) -> Iterable[ChildItem]: doc = self.value if isinstance(doc, DocumentError): # The entire document failed to parse. Just generate one error: - yield "[error]", f"Parsing error at byte {doc.error_offset}: {doc.message}" + yield '[error]', f'Parsing error at byte {doc.error_offset}: {doc.message}' return for elem in doc.elements: if isinstance(elem, DocumentError): # There was an error at this location. - yield "[error]", f"Data error at offset {elem.error_offset}: {elem.message}" + yield '[error]', f'Data error at offset {elem.error_offset}: {elem.message}' else: # Create a ValueFactory for each element: yield functools.partial(self.create_child, self.sbvalue, elem) @@ -603,9 +603,9 @@ def get_children(self) -> Iterable[ChildItem]: @classmethod def create_child(cls, parent: SBValue, elem: DocumentElement) -> SBValue: """Generate the child elements for LLDB to walk through""" - if cls.__qualifier__ == "array": + if cls.__qualifier__ == 'array': # Don't quote the integer keys - name = f"[{elem.key}]" + name = f'[{elem.key}]' else: name = f"['{elem.key}']" value_addr = parent.load_addr + elem.value_offset @@ -637,20 +637,20 @@ def create_child(cls, parent: SBValue, elem: DocumentElement) -> SBValue: BSONType.MinKey: MinKeyDisplay.__get_sbtype__, } get_type = by_type.get(elem.type) - assert get_type is not None, f"Unhandled type tag? {elem=}" + assert get_type is not None, f'Unhandled type tag? 
{elem=}' # Create the SBType: type = get_type(frame, value_addr) # Create a synthetic child of that type at the address of the element's value: val = parent.synthetic_child_from_address(name, value_addr, type) - assert val.error.success, f"{elem=}, {val.error=}" + assert val.error.success, f'{elem=}, {val.error=}' return val class ArrayDisplay(DocumentDisplay): """Display for arrays. Most logic is implemented in the DocumentDisplay base.""" - __typename__ = "__bson_array_[0-9]+__" - __qualifier__: ClassVar[str] = "array" + __typename__ = '__bson_array_[0-9]+__' + __qualifier__: ClassVar[str] = 'array' class BinaryInfo(NamedTuple): @@ -661,7 +661,7 @@ class BinaryInfo(NamedTuple): class BinaryDisplay(SyntheticDisplayBase[BinaryInfo]): """Display for a BSON binary value""" - __typename__ = "__bson_binary__" + __typename__ = '__bson_binary__' @classmethod @override @@ -677,11 +677,11 @@ def __parse__(cls, value: SBValue) -> BinaryInfo: @override def get_children(self) -> Iterable[ChildItem]: - yield "size", len(self.value.data) - byte_t = generate_or_get_type("enum __bson_byte__ : char {}", self.sbvalue.frame) - yield "subtype", self.value.subtype, lldb.eFormatHex, byte_t + yield 'size', len(self.value.data) + byte_t = generate_or_get_type('enum __bson_byte__ : char {}', self.sbvalue.frame) + yield 'subtype', self.value.subtype, lldb.eFormatHex, byte_t array_t = byte_t.GetArrayType(len(self.value.data)) - yield lambda: self.sbvalue.synthetic_child_from_address("data", self.address + 5, array_t) + yield lambda: self.sbvalue.synthetic_child_from_address('data', self.address + 5, array_t) class UndefinedDisplay(SyntheticDisplayBase[None]): @@ -689,8 +689,8 @@ class UndefinedDisplay(SyntheticDisplayBase[None]): Display type for 'undefined' values. Also derived from for other unit types. 
""" - __typename__ = "__bson_undefined__" - __summary_str__ = "undefined" + __typename__ = '__bson_undefined__' + __summary_str__ = 'undefined' __enable_synthetic__: ClassVar[bool] = False @classmethod @@ -702,7 +702,7 @@ def __parse__(cls, value: SBValue) -> None: class ObjectIDDisplay(SyntheticDisplayBase[bytes]): """Display type for ObjectIDs""" - __typename__ = "__bson_objectid__" + __typename__ = '__bson_objectid__' @classmethod @override @@ -729,20 +729,20 @@ def __parse__(cls, value: SBValue) -> bytes: @override def get_children(self) -> Iterable[ChildItem]: - yield "spelling", self.value.hex() + yield 'spelling', self.value.hex() class DatetimeDisplay(SyntheticDisplayBase[int]): """Display for BSON Datetime objects""" - __typename__ = "__bson_datetime__" - __summary_str__: ClassVar[str] = "datetime: ${var[0]}" + __typename__ = '__bson_datetime__' + __summary_str__: ClassVar[str] = 'datetime: ${var[0]}' @classmethod @override def __summary__(cls, value: SBValue, idict: InternalDict) -> str: dt = datetime.fromtimestamp(cls.__parse__(value) / 1000) - s = f"{dt:%a %b %m %Y %H:%M:%S +%fμs}" + s = f'{dt:%a %b %m %Y %H:%M:%S +%fμs}' return f'Date("{s}")' @classmethod @@ -750,7 +750,7 @@ def __summary__(cls, value: SBValue, idict: InternalDict) -> str: def __parse__(cls, val: SBValue) -> int: buf = memcache.get_cached(val.load_addr) buf = buf[:8] - value: int = struct.unpack(" Iterable[ChildItem]: # Adjusted to the local time zone: adjusted = dt.astimezone() yield from { - "[isoformat]": dt.isoformat(), - "[date]": f"{dt:%B %d, %Y}", - "[time]": dt.strftime("%H:%M:%S +%fμs"), - "[local]": adjusted.strftime("%c"), - "Year": dt.year, - "Month": dt.month, - "Day": dt.day, - "Hour": dt.hour, - "Minute": dt.minute, - "Second": dt.second, - "+μs": dt.microsecond, + '[isoformat]': dt.isoformat(), + '[date]': f'{dt:%B %d, %Y}', + '[time]': dt.strftime('%H:%M:%S +%fμs'), + '[local]': adjusted.strftime('%c'), + 'Year': dt.year, + 'Month': dt.month, + 'Day': dt.day, + 'Hour': 
dt.hour, + 'Minute': dt.minute, + 'Second': dt.second, + '+μs': dt.microsecond, }.items() class NullDisplay(UndefinedDisplay): """Display for the BSON 'null' type""" - __typename__ = "__bson_null__" - __summary_str__ = "null" + __typename__ = '__bson_null__' + __summary_str__ = 'null' class RegexDisplay(SyntheticDisplayBase[Tuple[bytes, bytes]]): """Display type for BSON regular expressions""" - __typename__ = "__bson_regex_[0-9]+_[0-9]+__" + __typename__ = '__bson_regex_[0-9]+_[0-9]+__' __enable_synthetic__: ClassVar[bool] = False @classmethod @@ -815,8 +815,8 @@ def __summary__(cls, value: SBValue, idict: InternalDict) -> str: # Create a JS-style regex literal: pair = cls.__parse__(value) regex, options = cls.decode_pair(pair) - regex = regex.replace("/", "\\/").replace("\n", "\\n") - return f"/{regex}/{options}" + regex = regex.replace('/', '\\/').replace('\n', '\\n') + return f'/{regex}/{options}' @classmethod def parse_at(cls, addr: int) -> tuple[bytes, bytes]: @@ -833,16 +833,16 @@ def parse_at(cls, addr: int) -> tuple[bytes, bytes]: @classmethod def decode_pair(cls, value: tuple[bytes, bytes]) -> tuple[str, str]: regex, options = value - regex = regex.decode("utf-8", errors="replace") - options = options.decode("utf-8", errors="replace") + regex = regex.decode('utf-8', errors='replace') + options = options.decode('utf-8', errors='replace') return regex, options class DBPointerDisplay(SyntheticDisplayBase[Tuple[bytes, int]]): """Display type for DBPointers""" - __typename__ = "__bson_dbpointer__" - __summary_str__: ClassVar[str | None] = "DBPointer(${var[0]}, ${var[1]})" + __typename__ = '__bson_dbpointer__' + __summary_str__: ClassVar[str | None] = 'DBPointer(${var[0]}, ${var[1]})' @classmethod @override @@ -858,36 +858,36 @@ def get_children(self) -> Iterable[ChildItem]: utf8_t = UTF8Display.__get_sbtype__(self.sbvalue.frame, self.address) oid_offset = self.value[1] oid_t = ObjectIDDisplay.__get_sbtype__(self.sbvalue.frame, self.address + oid_offset) - 
yield lambda: self.sbvalue.synthetic_child_from_address("collection", self.sbvalue.load_addr, utf8_t) - yield lambda: self.sbvalue.synthetic_child_from_address("object", self.sbvalue.load_addr + oid_offset, oid_t) + yield lambda: self.sbvalue.synthetic_child_from_address('collection', self.sbvalue.load_addr, utf8_t) + yield lambda: self.sbvalue.synthetic_child_from_address('object', self.sbvalue.load_addr + oid_offset, oid_t) class CodeDisplay(UTF8Display): """Display type for BSON code""" - __typename__ = "__bson_code__" - __summary_str__ = "Code(${var[1]})" + __typename__ = '__bson_code__' + __summary_str__ = 'Code(${var[1]})' class SymbolDisplay(UTF8Display): """Display type for BSON symbols""" - __typename__ = "__bson_symbol__" + __typename__ = '__bson_symbol__' @classmethod @override def __summary__(cls, value: SBValue, idict: InternalDict) -> str: spell = cls.__parse__(value) - dec = spell.decode("utf-8", errors="replace").rstrip("\x00") - return f"Symbol({dec})" + dec = spell.decode('utf-8', errors='replace').rstrip('\x00') + return f'Symbol({dec})' class CodeWithScopeDisplay(SyntheticDisplayBase[int]): """Display type for BSON 'Code w/ Scope'""" - __typename__ = "__code_with_scope__" + __typename__ = '__code_with_scope__' - __summary_str__: ClassVar[str | None] = "Code(${var[0][1]}, ${var[1]})" + __summary_str__: ClassVar[str | None] = 'Code(${var[0][1]}, ${var[1]})' @classmethod @override @@ -903,54 +903,54 @@ def get_children(self) -> Iterable[ChildItem]: code_t = CodeDisplay.__get_sbtype__(self.sbvalue.frame, self.address) scope_doc_offset = self.value doc_t = DocumentDisplay.__get_sbtype__(self.sbvalue.frame, self.address + scope_doc_offset) - yield lambda: checked(self.sbvalue.synthetic_child_from_address("code", self.address + 4, code_t)) + yield lambda: checked(self.sbvalue.synthetic_child_from_address('code', self.address + 4, code_t)) yield lambda: checked( - self.sbvalue.synthetic_child_from_address("scope", self.address + scope_doc_offset, doc_t) 
+ self.sbvalue.synthetic_child_from_address('scope', self.address + scope_doc_offset, doc_t) ) class Int32Display(PrimitiveDisplay[int]): """Display for 32-bit BSON integers""" - __typename__ = "__bson_int32__" - __struct_format__: ClassVar[str] = " str: - return f"NumberInt({cls.__parse__(value)})" + return f'NumberInt({cls.__parse__(value)})' class Int64Display(PrimitiveDisplay[int]): """Display for 64-bit BSON integers""" - __typename__ = "__bson_int64__" - __struct_format__: ClassVar[str] = " str: - return f"NumberLong({cls.__parse__(value)})" + return f'NumberLong({cls.__parse__(value)})' class TimestampDisplay(SyntheticDisplayBase[Tuple[int, int]]): """Display type for BSON timestamps""" - __typename__ = "__bson_timestamp__" - __summary_str__ = "Timestamp(${var[0]}, ${var[1]})" + __typename__ = '__bson_timestamp__' + __summary_str__ = 'Timestamp(${var[0]}, ${var[1]})' @classmethod @override def __parse__(cls, value: SBValue) -> tuple[int, int]: buf = memcache.get_cached(value.load_addr)[:8] # Just two 32bit integers: - timestamp, increment = struct.unpack(" Iterable[ChildItem]: - yield "timestamp", self.value[0] - yield "increment", self.value[1] + yield 'timestamp', self.value[0] + yield 'increment', self.value[1] class Decimal128Value(NamedTuple): @@ -966,7 +966,7 @@ class Decimal128Value(NamedTuple): class Decimal128Display(SyntheticDisplayBase[Decimal128Value]): """The display type for BSON's Decimal128 type""" - __typename__ = "__bson_decimal128__" + __typename__ = '__bson_decimal128__' @classmethod @override @@ -1025,11 +1025,11 @@ def __parse__(cls, value: SBValue) -> Decimal128Value: # Check for special values in the remainder of the combination: more = bits[3:6] if more == (1, 0, 0) or more == (1, 0, 1): - spelling = "Infinity" + spelling = 'Infinity' elif more == (1, 1, 0): - spelling = "NaN (quiet)" + spelling = 'NaN (quiet)' elif more == (1, 1, 1): - spelling = "NaN (signaling)" + spelling = 'NaN (signaling)' coeff = mergebits(coeff) exponent = 
mergebits(exponent) @@ -1042,12 +1042,12 @@ def __parse__(cls, value: SBValue) -> Decimal128Value: spelling = spelling.zfill(abs(e)) split = len(spelling) + e w, fr = spelling[:split], spelling[split:] - spelling = f"{w}.{fr}" + spelling = f'{w}.{fr}' else: - spelling = spelling + "0" * e + spelling = spelling + '0' * e if sign: - spelling = f"-{spelling}" + spelling = f'-{spelling}' # The "combination" bits combination = mergebits(bits[1:18]) @@ -1055,52 +1055,52 @@ def __parse__(cls, value: SBValue) -> Decimal128Value: @override def get_children(self) -> Iterable[ChildItem]: - yield "sign", self.value.sign - yield "combination", self.value.combination, lldb.eFormatBinary, None - yield "exponent (biased)", self.value.exponent - yield "exponent (actual)", self.value.exponent - 6176 - yield "significand", str(self.value.significand) - yield "value", self.value.spelling + yield 'sign', self.value.sign + yield 'combination', self.value.combination, lldb.eFormatBinary, None + yield 'exponent (biased)', self.value.exponent + yield 'exponent (actual)', self.value.exponent - 6176 + yield 'significand', str(self.value.significand) + yield 'value', self.value.spelling class MaxKeyDisplay(NullDisplay): """The display type for BSON's 'max key' type""" - __typename__ = "__bson_maxkey__" - __summary_str__ = "max key" + __typename__ = '__bson_maxkey__' + __summary_str__ = 'max key' class MinKeyDisplay(NullDisplay): """The display type for BSON's 'min key' type""" - __typename__ = "__bson_minkey__" - __summary_str__ = "min key" + __typename__ = '__bson_minkey__' + __summary_str__ = 'min key' class BSONTInfo(NamedTuple): """Information about a bson_t object""" addr: int - "The address of the pointer to the beginning of the BSON data managed by this object" + 'The address of the pointer to the beginning of the BSON data managed by this object' size: int - "The size of the BSON data managed/referenced by this object" + 'The size of the BSON data managed/referenced by this object' 
flags: int - "Flags of the bson_t object" + 'Flags of the bson_t object' class BSONTError(NamedTuple): """Represents an error while reading a bson_t object""" reason: str - "A description of the error that ocurred" + 'A description of the error that ocurred' -class BSONTDisplay(SyntheticDisplayBase["BSONTInfo | BSONTError"]): +class BSONTDisplay(SyntheticDisplayBase['BSONTInfo | BSONTError']): """ Implements inspection logic for bson_t """ - __typename__ = "bson_t" + __typename__ = 'bson_t' @classmethod @override @@ -1117,42 +1117,42 @@ def __parse__(cls, value: SBValue) -> BSONTInfo | BSONTError: err = SBError() flags = dat.GetUnsignedInt32(err, 0) if err.fail: - return BSONTError(f"Failed to read memory at 0x{value.load_addr:x}: {err.description}") + return BSONTError(f'Failed to read memory at 0x{value.load_addr:x}: {err.description}') length = dat.GetUnsignedInt32(err, 4) if err.fail: - return BSONTError(f"Failed to read memory at 0x{value.load_addr:x}: {err.description}") + return BSONTError(f'Failed to read memory at 0x{value.load_addr:x}: {err.description}') # Check bogus values: MAX_SIZE = 16 * 1024 * 1024 ALL_FLAGS = (1 << 6) - 1 if flags & ~ALL_FLAGS or length < 5 or length > MAX_SIZE: - return BSONTError(f"bson_t appears uninitialized/invalid [a] {flags=} {length=}") + return BSONTError(f'bson_t appears uninitialized/invalid [a] {flags=} {length=}') is_inline = bool(flags & 1) if is_inline: # Inline objects may only occupy 120 bytes, at most if length > 120: - return BSONTError("bson_t appears uninitialized/invalid [b]") + return BSONTError('bson_t appears uninitialized/invalid [b]') # Look for debug info for the inline impl - inline_t = value.target.FindFirstType("bson_impl_inline_t") + inline_t = value.target.FindFirstType('bson_impl_inline_t') if inline_t: as_inline = value.Cast(inline_t) - ptr = as_inline.GetChildMemberWithName("data").load_addr + ptr = as_inline.GetChildMemberWithName('data').load_addr else: # No debug info? 
Guess its location as the default ptr = value.load_addr + 4 + 4 if not err.success: - return BSONTError(f"Failed to read inline bson_t data: {err}") + return BSONTError(f'Failed to read inline bson_t data: {err}') return BSONTInfo(ptr, length, flags) # Look for impl_alloc_t - alloc_t = value.target.FindFirstType("bson_impl_alloc_t") + alloc_t = value.target.FindFirstType('bson_impl_alloc_t') if alloc_t: alloc = value.Cast(alloc_t) # Walk to the buffer for this value: - offset = alloc.GetChildMemberWithName("offset").unsigned - buf = alloc.GetChildMemberWithName("buf").deref.deref + offset = alloc.GetChildMemberWithName('offset').unsigned + buf = alloc.GetChildMemberWithName('buf').deref.deref ptr = buf.load_addr + offset return BSONTInfo(ptr, length, flags) @@ -1168,48 +1168,48 @@ def __parse__(cls, value: SBValue) -> BSONTInfo | BSONTError: offset_off = buf_off + (ptr_size * 2) offset = dat.GetUnsignedInt32(err, offset_off) if not err.success: - return BSONTError(f"Failed to read offset of buffer: {err}") - bufptr = value.CreateChildAtOffset("buf", buf_off, u8ptr_t.GetPointerType()).deref + return BSONTError(f'Failed to read offset of buffer: {err}') + bufptr = value.CreateChildAtOffset('buf', buf_off, u8ptr_t.GetPointerType()).deref if not bufptr.error.success: - return BSONTError(f"Failed to read the alloc buf: {bufptr.error} {offset=} {buf_off=}") + return BSONTError(f'Failed to read the alloc buf: {bufptr.error} {offset=} {buf_off=}') ptr = bufptr.data.GetUnsignedInt64(err, 0) assert err.success, err - u32_t = value.target.FindFirstType("uint32_t") + u32_t = value.target.FindFirstType('uint32_t') addr = SBAddress() addr.SetLoadAddress(ptr, value.target) - u32 = value.target.CreateValueFromAddress("tmp", addr, u32_t) + u32 = value.target.CreateValueFromAddress('tmp', addr, u32_t) assert u32.error.success, u32 if u32.unsigned != length or length < 5: - return BSONTError(f"bson_t appears uninitialized/invalid [c] {flags=} {length=} {u32.unsigned=}") + return 
BSONTError(f'bson_t appears uninitialized/invalid [c] {flags=} {length=} {u32.unsigned=}') return BSONTInfo(ptr, length, flags) @override def get_children(self) -> Iterable[ChildItem]: val = self.value if isinstance(val, BSONTError): - yield "[error]", val.reason + yield '[error]', val.reason return # Imbue the flags with the possible debug info to give it a nice rendering - flags_t = self.sbvalue.target.FindFirstType("bson_flags_t") + flags_t = self.sbvalue.target.FindFirstType('bson_flags_t') if flags_t.IsValid(): - yield "flags", val.flags, None, flags_t + yield 'flags', val.flags, None, flags_t else: - yield "flags", val.flags - yield "data size", val.size + yield 'flags', val.flags + yield 'data size', val.size ptr_t = self.sbvalue.target.GetBasicType(lldb.eBasicTypeVoid).GetPointerType() - yield "data address", val.addr, lldb.eFormatPointer, ptr_t + yield 'data address', val.addr, lldb.eFormatPointer, ptr_t # Generate the __bson_document_xxx__ that will allow walking the document: doc_t = DocumentDisplay.__get_sbtype__(self.sbvalue.frame, val.addr) - yield lambda: checked(self.sbvalue.synthetic_child_from_address("[content]", val.addr, doc_t)) + yield lambda: checked(self.sbvalue.synthetic_child_from_address('[content]', val.addr, doc_t)) def checked(val: SBValue) -> SBValue: """Assert that ``val`` is valid. Returns ``val``""" - assert val.error.success, f"{val=} {val.error=}" + assert val.error.success, f'{val=} {val.error=}' return val @@ -1217,11 +1217,11 @@ def read_i32le(dat: bytes) -> int: """Read a 32-bit integer from the given data.""" # Truncate before the read: buf = dat[0:4] - return struct.unpack(" SBType: @@ -1254,8 +1254,8 @@ def generate_or_get_type(expr_prefix: str, frame: SBFrame) -> SBType: # Create a new temporary object. Give it a unique name to prevent it from # colliding with any possible temporaries we may have generated previously. 
hash = hashlib.md5(expr_prefix.encode()).hexdigest() - varname = f"__bson_lldb_tmp_{hash}" - full_expr = f"{expr_prefix} {varname}; {varname}" + varname = f'__bson_lldb_tmp_{hash}' + full_expr = f'{expr_prefix} {varname}; {varname}' tmp = frame.EvaluateExpression(full_expr) existing = tmp.type _types_cache[cachekey] = existing @@ -1307,11 +1307,11 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str: # Evaluate the left-hand string as an expression within the target target = lldb.debugger.GetSelectedTarget() if target is None: - raise RuntimeError("Not attached to a debug target") + raise RuntimeError('Not attached to a debug target') frame = target.process.selected_thread.frames[0] lhs = frame.EvaluateExpression(lhs) val: SBValue - if hasattr(lhs.__class__, "unwrap"): + if hasattr(lhs.__class__, 'unwrap'): # CodeLLDB gives us a wrapper around SBValue, but we want the unwrapped # version: val = lhs.__class__.unwrap(lhs) @@ -1333,14 +1333,14 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str: # Create the synthetic __bson_document_xxx__ object for this doc doc_t = DocumentDisplay.__get_sbtype__(val.frame, as_bson.addr) # Obtain a value reference to the document data: - retval = val.CreateValueFromAddress("[root]", as_bson.addr, doc_t) + retval = val.CreateValueFromAddress('[root]', as_bson.addr, doc_t) # Now resolve the path: for part in self._path: if isinstance(part, str): # Access via ``p['foo']`` or ``p.foo``, requires our current node # to be a document: - if not retval.type.name.startswith("__bson_document_"): + if not retval.type.name.startswith('__bson_document_'): raise AttributeError( f'Element of type {retval.type.name} cannot be accessed as a document (looking for element "{part}")' ) @@ -1349,12 +1349,12 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str: want_child_name = f"['{part}']" else: # Access via indexing ``p[42]``, requires an array - if not retval.type.name.startswith("__bson_array_"): + if not retval.type.name.startswith('__bson_array_'): 
raise AttributeError( - f"Element of type {retval.type.name} cannot be accessed as an array (looking for element {part})" + f'Element of type {retval.type.name} cannot be accessed as an array (looking for element {part})' ) # Array keys are bracketed, but not quoted - want_child_name = f"[{part}]" + want_child_name = f'[{part}]' # Find all children that match the key (usually only one) matching = (c for c in retval.children if c.name == want_child_name) # Get it: @@ -1364,7 +1364,7 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str: if isinstance(part, str): raise KeyError(f'Document has no element "{part}"') else: - raise IndexError(f"Array index [{part}] is out-of-bounds") + raise IndexError(f'Array index [{part}] is out-of-bounds') # Set this as our current node, which we may step in further, or # we may be done retval = got @@ -1394,7 +1394,7 @@ class _MemoryCache: def __init__(self): self._segments: dict[int, bytes] = {} - "Segments of memory keyed by the base address of the read operation" + 'Segments of memory keyed by the base address of the read operation' def get_cached(self, addr: int) -> bytes: """ @@ -1410,9 +1410,9 @@ def get_cached(self, addr: int) -> bytes: segment = self.segment_containing(addr) if not segment: # Memory does not exist? - print(f"lldb_bson: Note: Attempted read of uncached address 0x{addr:x}") - print("".join(traceback.format_stack())) - return b"\0" * 512 + print(f'lldb_bson: Note: Attempted read of uncached address 0x{addr:x}') + print(''.join(traceback.format_stack())) + return b'\0' * 512 base_addr, data = segment inner_offset = addr - base_addr return data[inner_offset:] @@ -1446,4 +1446,4 @@ def read_at(self, proc: SBProcess, addr: int, size: int) -> tuple[int, bytes]: memcache = _MemoryCache() -"A module-wide memory segment cache." +'A module-wide memory segment cache.' 
diff --git a/src/libbson/doc/conf.py b/src/libbson/doc/conf.py index b0d1a391189..bf8187fac2f 100644 --- a/src/libbson/doc/conf.py +++ b/src/libbson/doc/conf.py @@ -5,30 +5,30 @@ # Ensure we can import "mongoc" extension module. this_path = os.path.dirname(__file__) -sys.path.append(os.path.normpath(os.path.join(this_path, "../../../build/sphinx"))) +sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) from mongoc_common import * extensions = [ - "mongoc", + 'mongoc', ] # General information about the project. -project = "libbson" -copyright = "2009-present, MongoDB, Inc." -author = "MongoDB, Inc" +project = 'libbson' +copyright = '2009-present, MongoDB, Inc.' +author = 'MongoDB, Inc' -version_path = os.path.join(os.path.dirname(__file__), "../../..", "VERSION_CURRENT") +version_path = os.path.join(os.path.dirname(__file__), '../../..', 'VERSION_CURRENT') version = open(version_path).read().strip() -language = "en" -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] -master_doc = "index" +language = 'en' +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +master_doc = 'index' # -- Options for HTML output ---------------------------------------------- -html_theme = "furo" -html_title = html_shorttitle = "libbson %s" % version +html_theme = 'furo' +html_title = html_shorttitle = 'libbson %s' % version # html_favicon = None html_sidebars = {} @@ -47,9 +47,9 @@ def add_canonical_link(app, pagename, templatename, context, doctree): link = f'' - context["metatags"] = context.get("metatags", "") + link + context['metatags'] = context.get('metatags', '') + link def setup(app): mongoc_common_setup(app) - app.connect("html-page-context", add_canonical_link) + app.connect('html-page-context', add_canonical_link) diff --git a/src/libbson/examples/compile-with-pkg-config-static.sh b/src/libbson/examples/compile-with-pkg-config-static.sh index dd84504415c..d7755052cc9 100755 --- a/src/libbson/examples/compile-with-pkg-config-static.sh 
+++ b/src/libbson/examples/compile-with-pkg-config-static.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # -- sphinx-include-start -- gcc -o hello_bson hello_bson.c $(pkg-config --libs --cflags bson$major-static) diff --git a/src/libbson/examples/compile-with-pkg-config.sh b/src/libbson/examples/compile-with-pkg-config.sh index 9e4ed5239cc..d7de0256e32 100755 --- a/src/libbson/examples/compile-with-pkg-config.sh +++ b/src/libbson/examples/compile-with-pkg-config.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # -- sphinx-include-start -- gcc -o hello_bson hello_bson.c $(pkg-config --libs --cflags bson$major) diff --git a/src/libbson/fuzz/make-dicts.py b/src/libbson/fuzz/make-dicts.py index bb64765af86..2d07a12f782 100644 --- a/src/libbson/fuzz/make-dicts.py +++ b/src/libbson/fuzz/make-dicts.py @@ -8,60 +8,60 @@ def generate(): simple_oid = OID((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)) ITEMS: list[LineItem] = [ - Comment("This file is GENERATED! DO NOT MODIFY!"), - Comment("Instead, modify the content of make-dicts.py"), + Comment('This file is GENERATED! 
DO NOT MODIFY!'), + Comment('Instead, modify the content of make-dicts.py'), Line(), - Comment("Random values"), - Entry("int32_1729", encode_value(1729)), - Entry("int64_1729", struct.pack(" 0;"))), - Entry("symbol_elem", element("Sym", Symbol("symbol"))), - Entry("oid_elem", element("OID", simple_oid)), - Entry("dbpointer_elem", element("dbp", DBPointer(String("db"), simple_oid))), + Entry('code_elem', element('Js', Code('() => 0;'))), + Entry('symbol_elem', element('Sym', Symbol('symbol'))), + Entry('oid_elem', element('OID', simple_oid)), + Entry('dbpointer_elem', element('dbp', DBPointer(String('db'), simple_oid))), Line(), - Comment("Embedded nul"), - Comment("This string contains an embedded null, which is abnormal but valid"), - Entry("string_with_null", element("S0", "string\0value")), - Comment("This regex has an embedded null, which is invalid"), - Entry("bad_regex_elem", element("RxB", Regex("f\0oo", "ig"))), + Comment('Embedded nul'), + Comment('This string contains an embedded null, which is abnormal but valid'), + Entry('string_with_null', element('S0', 'string\0value')), + Comment('This regex has an embedded null, which is invalid'), + Entry('bad_regex_elem', element('RxB', Regex('f\0oo', 'ig'))), Comment("This element's key contains an embedded null, which is invalid"), - Entry("bad_key_elem", element("foo\0bar", "string")), + Entry('bad_key_elem', element('foo\0bar', 'string')), Line(), - Comment("Objects"), - Entry("obj_with_string", wrap_obj(element("single-elem", "foo"))), - Entry("obj_with_null", wrap_obj(element("null", None))), - Entry("obj_missing_term", wrap_obj(b"")[:-1]), + Comment('Objects'), + Entry('obj_with_string', wrap_obj(element('single-elem', 'foo'))), + Entry('obj_with_null', wrap_obj(element('null', None))), + Entry('obj_missing_term', wrap_obj(b'')[:-1]), ] for it in ITEMS: emit(it) -BytesIter = bytes | Iterable["BytesIter"] +BytesIter = bytes | Iterable['BytesIter'] def flatten(b: BytesIter) -> bytes: if isinstance(b, 
bytes): return b else: - return b"".join(map(flatten, b)) + return b''.join(map(flatten, b)) def len_prefix(b: BytesIter) -> bytes: @@ -73,7 +73,7 @@ def len_prefix(b: BytesIter) -> bytes: def make_cstring(s: str) -> bytes: """Encode a UTF-8 string and append a null terminator""" - return s.encode("utf-8") + b"\0" + return s.encode('utf-8') + b'\0' def make_string(s: str) -> bytes: @@ -85,12 +85,12 @@ def wrap_obj(items: BytesIter) -> bytes: """Wrap a sequence of bytes as if a BSON object (adds a header and trailing nul)""" bs = flatten(items) header = len(bs) + 5 - return encode_value(header) + bs + b"\0" + return encode_value(header) + bs + b'\0' class UndefinedType: def __bytes__(self) -> bytes: - return b"" + return b'' class Binary(NamedTuple): @@ -187,26 +187,28 @@ def __bytes__(self) -> bytes: def encode_value(val: ValueType) -> bytes: match val: case int(n): - return struct.pack(" bytes: class Entry(NamedTuple): key: str - "The key for the entry. Only for human readability" + 'The key for the entry. 
Only for human readability' value: bytes - "The arbitrary bytes that make up the entry" + 'The arbitrary bytes that make up the entry' class Comment(NamedTuple): @@ -274,21 +276,21 @@ class Comment(NamedTuple): class Line(NamedTuple): - txt: str = "" + txt: str = '' LineItem = Entry | Comment | Line def escape(b: bytes) -> Iterable[str]: - s = b.decode("ascii", "backslashreplace") + s = b.decode('ascii', 'backslashreplace') for u8 in b: s = chr(u8) # 0 <= u8 and u8 <= 255 if s.isascii() and s.isprintable(): yield s continue # Byte is not valid ASCII, or is not a printable char - yield f"\\x{u8:0>2x}" + yield f'\\x{u8:0>2x}' def emit(item: LineItem): @@ -296,12 +298,12 @@ def emit(item: LineItem): case Line(t): print(t) case Comment(txt): - print(f"# {txt}") + print(f'# {txt}') case Entry(key, val): - s = "".join(escape(val)) - s = s.replace('"', r"\x22") + s = ''.join(escape(val)) + s = s.replace('"', r'\x22') print(f'{key}="{s}"') -if __name__ == "__main__": +if __name__ == '__main__': generate() diff --git a/src/libbson/tests/validate-tests.py b/src/libbson/tests/validate-tests.py index 6cac02d655d..b7fa3c5fba1 100644 --- a/src/libbson/tests/validate-tests.py +++ b/src/libbson/tests/validate-tests.py @@ -59,22 +59,22 @@ def flatten_bytes(data: _ByteIter) -> bytes: case bytes(data): return data case it: - return b"".join(map(flatten_bytes, it)) + return b''.join(map(flatten_bytes, it)) def i32le(i: int) -> bytes: """Encode an integer as a 32-bit little-endian integer""" - return struct.pack(" bytes: """Encode an integer as a 64-bit little-endian integer""" - return struct.pack(" bytes: """Encode a float as a 64-bit little-endian float""" - return struct.pack(" bytes: @@ -82,7 +82,7 @@ def doc(*data: _ByteIter) -> bytes: flat = flatten_bytes(data) # +5 for the null terminator and the header bytes hdr = i32le(len(flat) + 5) - return hdr + flat + b"\0" + return hdr + flat + b'\0' def code_with_scope(code: str, doc: _ByteIter) -> bytes: @@ -114,10 +114,10 @@ def 
cstring(s: str | _ByteIter) -> bytes: """Encode a string as UTF-8 and add a null terminator""" match s: case str(s): - return cstring(s.encode("utf-8")) + return cstring(s.encode('utf-8')) case bs: bs = flatten_bytes(bs) - return bs + b"\0" + return bs + b'\0' def string(s: str | _ByteIter) -> bytes: @@ -159,15 +159,15 @@ class TestCase: """The bytes that will be injested by `bson_init_static` to form the document to be validated""" description: str | None """A plaintext description of the test case and what it actually does. Rendered as a comment.""" - flags: str = "0" + flags: str = '0' """Spelling of the flags argument passed to the validation API""" - error: ErrorInfo = ErrorInfo("0", "", 0) + error: ErrorInfo = ErrorInfo('0', '', 0) """Expected error, if any""" @property def fn_name(self) -> str: """Get a C identifier function name for this test case""" - return "_test_case_" + re.sub(r"[^\w]", "_", self.name).lower() + return '_test_case_' + re.sub(r'[^\w]', '_', self.name).lower() def fmt_byte(n: int) -> str: @@ -177,16 +177,16 @@ def fmt_byte(n: int) -> str: """ match n: case 0: - return "0" - case a if re.match(r"[a-zA-Z0-9.$-]", chr(a)): + return '0' + case a if re.match(r'[a-zA-Z0-9.$-]', chr(a)): return f"'{chr(a)}'" case a if a < 10: return str(a) case n: - return f"0x{n:0>2x}" + return f'0x{n:0>2x}' -GENERATED_NOTE = "// ! This code is GENERATED! Do not edit it directly!" +GENERATED_NOTE = '// ! This code is GENERATED! Do not edit it directly!' HEADER = rf"""{GENERATED_NOTE} // clang-format off @@ -204,46 +204,46 @@ def generate(case: TestCase) -> Iterable[str]: Generate the lines of a test case function. 
""" # A comment header - yield f"{GENERATED_NOTE}\n" - yield f"// Case: {case.name}\n" + yield f'{GENERATED_NOTE}\n' + yield f'// Case: {case.name}\n' # The function head - yield f"static inline void {case.fn_name}(void) {{\n" + yield f'static inline void {case.fn_name}(void) {{\n' # If we have a description, emit that in a block comment if case.description: - yield " /**\n" + yield ' /**\n' lines = textwrap.dedent(case.description).strip().splitlines() - yield from (f" * {ln}\n" for ln in lines) - yield " */\n" + yield from (f' * {ln}\n' for ln in lines) + yield ' */\n' # Emit the byte array literal - yield " const uint8_t bytes[] = {\n" - yield "\n".join( + yield ' const uint8_t bytes[] = {\n' + yield '\n'.join( textwrap.wrap( - ", ".join(map(fmt_byte, case.data)), - subsequent_indent=" " * 4, - initial_indent=" " * 4, + ', '.join(map(fmt_byte, case.data)), + subsequent_indent=' ' * 4, + initial_indent=' ' * 4, width=80, ) ) - yield "\n };\n" + yield '\n };\n' yield from [ # Initialize a BSON doc that points to the byte array - " bson_t doc;\n", - " mlib_check(bson_init_static(&doc, bytes, sizeof bytes));\n", + ' bson_t doc;\n', + ' mlib_check(bson_init_static(&doc, bytes, sizeof bytes));\n', # The error object to be filled - " bson_error_t error = {0};\n", + ' bson_error_t error = {0};\n', # The error offset. Expected to be reset to zero on success. 
- " size_t offset = 999999;\n" + ' size_t offset = 999999;\n' # Do the actual validation: - f" const bool is_valid = bson_validate_with_error_and_offset(&doc, {case.flags}, &offset, &error);\n", + f' const bool is_valid = bson_validate_with_error_and_offset(&doc, {case.flags}, &offset, &error);\n', ] - is_error = case.error.code != "0" + is_error = case.error.code != '0' yield from [ - " mlib_check(!is_valid);\n" if is_error else " ASSERT_OR_PRINT(is_valid, error);\n", - f" mlib_check(error.code, eq, {case.error.code});\n", - f" mlib_check(error.message, str_eq, {json.dumps(case.error.message)});\n", - f" mlib_check(offset, eq, {case.error.offset});\n" if is_error else "", + ' mlib_check(!is_valid);\n' if is_error else ' ASSERT_OR_PRINT(is_valid, error);\n', + f' mlib_check(error.code, eq, {case.error.code});\n', + f' mlib_check(error.message, str_eq, {json.dumps(case.error.message)});\n', + f' mlib_check(offset, eq, {case.error.offset});\n' if is_error else '', ] - yield "}\n" + yield '}\n' def corruption_at(off: int) -> ErrorInfo: @@ -254,20 +254,20 @@ def corruption_at(off: int) -> ErrorInfo: Note that this won't match if the error message is something other than "corrupt BSON". 
""" - return ErrorInfo(BSON_VALIDATE_CORRUPT, "corrupt BSON", off) + return ErrorInfo(BSON_VALIDATE_CORRUPT, 'corrupt BSON', off) -BSON_VALIDATE_CORRUPT = "BSON_VALIDATE_CORRUPT" -BSON_VALIDATE_DOLLAR_KEYS = "BSON_VALIDATE_DOLLAR_KEYS" -BSON_VALIDATE_DOT_KEYS = "BSON_VALIDATE_DOT_KEYS" -BSON_VALIDATE_EMPTY_KEYS = "BSON_VALIDATE_EMPTY_KEYS" -BSON_VALIDATE_UTF8 = "BSON_VALIDATE_UTF8" -BSON_VALIDATE_UTF8_ALLOW_NULL = "BSON_VALIDATE_UTF8_ALLOW_NULL" -MSG_EXPECTED_ID_FOLLOWING_REF = "Expected an $id element following $ref" +BSON_VALIDATE_CORRUPT = 'BSON_VALIDATE_CORRUPT' +BSON_VALIDATE_DOLLAR_KEYS = 'BSON_VALIDATE_DOLLAR_KEYS' +BSON_VALIDATE_DOT_KEYS = 'BSON_VALIDATE_DOT_KEYS' +BSON_VALIDATE_EMPTY_KEYS = 'BSON_VALIDATE_EMPTY_KEYS' +BSON_VALIDATE_UTF8 = 'BSON_VALIDATE_UTF8' +BSON_VALIDATE_UTF8_ALLOW_NULL = 'BSON_VALIDATE_UTF8_ALLOW_NULL' +MSG_EXPECTED_ID_FOLLOWING_REF = 'Expected an $id element following $ref' def disallowed_key(char: str, k: str) -> str: - return f"Disallowed '{char}' in element key: \"{k}\"" + return f'Disallowed \'{char}\' in element key: "{k}"' # d888888b d88888b .d8888. d888888b .o88b. .d8b. .d8888. d88888b .d8888. 
@@ -279,28 +279,28 @@ def disallowed_key(char: str, k: str) -> str: CASES: list[TestCase] = [ TestCase( - "empty", + 'empty', doc(), """Test a simple empty document object.""", ), TestCase( - "bad-element", - doc(b"f"), - "The element content is not valid", + 'bad-element', + doc(b'f'), + 'The element content is not valid', error=corruption_at(6), ), TestCase( - "invalid-type", - doc(elem("foo", 0xE, b"foo")), + 'invalid-type', + doc(elem('foo', 0xE, b'foo')), """The type tag "0x0e" is not a valid type""", error=corruption_at(9), ), TestCase( - "key/invalid/accept", + 'key/invalid/accept', doc( - utf8elem("a", "b"), - utf8elem(b"foo\xffbar", "baz"), - utf8elem("c", "d"), + utf8elem('a', 'b'), + utf8elem(b'foo\xffbar', 'baz'), + utf8elem('c', 'd'), ), """ The element key contains an invalid UTF-8 byte, but we accept it @@ -308,49 +308,49 @@ def disallowed_key(char: str, k: str) -> str: """, ), TestCase( - "key/invalid/reject", + 'key/invalid/reject', doc( - utf8elem("a", "b"), - elem(b"foo\xffbar", Tag.UTF8, string("baz")), - utf8elem("c", "d"), + utf8elem('a', 'b'), + elem(b'foo\xffbar', Tag.UTF8, string('baz')), + utf8elem('c', 'd'), ), """ The element key is not valid UTF-8 and we reject it when we do UTF-8 validation. """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 13), + error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 13), ), TestCase( - "key/empty/accept", - doc(utf8elem("", "string")), + 'key/empty/accept', + doc(utf8elem('', 'string')), """ The element has an empty string key, and we accept this. """, ), TestCase( - "key/empty/reject", + 'key/empty/reject', doc( - utf8elem("a", "b"), - utf8elem("", "string"), + utf8elem('a', 'b'), + utf8elem('', 'string'), ), """ The element has an empty key, and we can reject it. 
""", flags=BSON_VALIDATE_EMPTY_KEYS, - error=ErrorInfo(BSON_VALIDATE_EMPTY_KEYS, "Element key cannot be an empty string", 13), + error=ErrorInfo(BSON_VALIDATE_EMPTY_KEYS, 'Element key cannot be an empty string', 13), ), TestCase( - "key/empty/accept-if-absent", - doc(utf8elem("foo", "bar")), + 'key/empty/accept-if-absent', + doc(utf8elem('foo', 'bar')), """ We are checking for empty keys, and accept if they are absent. """, flags=BSON_VALIDATE_EMPTY_KEYS, ), TestCase( - "key/dot/accept", - doc(utf8elem("foo.bar", "baz")), + 'key/dot/accept', + doc(utf8elem('foo.bar', 'baz')), """ The element key has an ASCII dot, and we accept this since we don't ask to validate it. @@ -358,42 +358,42 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_EMPTY_KEYS, ), TestCase( - "key/dot/reject", - doc(utf8elem("a", "b"), utf8elem("foo.bar", "baz")), + 'key/dot/reject', + doc(utf8elem('a', 'b'), utf8elem('foo.bar', 'baz')), """ The element has an ASCII dot, and we reject it when we ask to validate it. """, flags=BSON_VALIDATE_DOT_KEYS, - error=ErrorInfo(BSON_VALIDATE_DOT_KEYS, disallowed_key(".", "foo.bar"), 13), + error=ErrorInfo(BSON_VALIDATE_DOT_KEYS, disallowed_key('.', 'foo.bar'), 13), ), TestCase( - "key/dot/accept-if-absent", - doc(utf8elem("foo", "bar")), + 'key/dot/accept-if-absent', + doc(utf8elem('foo', 'bar')), """ We are checking for keys with dot '.', and accept if they are absent. """, flags=BSON_VALIDATE_DOT_KEYS, ), TestCase( - "key/dollar/accept", - doc(utf8elem("a", "b"), utf8elem("$foo", "bar")), + 'key/dollar/accept', + doc(utf8elem('a', 'b'), utf8elem('$foo', 'bar')), """ We can accept an element key that starts with a dollar '$' sign. """, ), TestCase( - "key/dollar/reject", - doc(utf8elem("a", "b"), utf8elem("$foo", "bar")), + 'key/dollar/reject', + doc(utf8elem('a', 'b'), utf8elem('$foo', 'bar')), """ We can reject an element key that starts with a dollar '$' sign. 
""", flags=BSON_VALIDATE_DOLLAR_KEYS, - error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$foo"), 13), + error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$foo'), 13), ), TestCase( - "key/dollar/accept-in-middle", - doc(utf8elem("foo$bar", "baz")), + 'key/dollar/accept-in-middle', + doc(utf8elem('foo$bar', 'baz')), """ This contains a element key "foo$bar", but we don't reject this, as we only care about keys that *start* with dollars. @@ -401,8 +401,8 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_DOLLAR_KEYS, ), TestCase( - "key/dollar/accept-if-absent", - doc(utf8elem("foo", "bar")), + 'key/dollar/accept-if-absent', + doc(utf8elem('foo', 'bar')), """ We are validating for dollar-keys, and we accept because this document doesn't contain any such keys. @@ -410,13 +410,13 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_DOLLAR_KEYS, ), TestCase( - "utf8/simple", - doc(utf8elem("string", "some string")), - "Simple UTF-8 string element", + 'utf8/simple', + doc(utf8elem('string', 'some string')), + 'Simple UTF-8 string element', ), TestCase( - "utf8/missing-null", - doc(elem("a", Tag.UTF8, i32le(4), b"abcd")), + 'utf8/missing-null', + doc(elem('a', Tag.UTF8, i32le(4), b'abcd')), """ The UTF-8 element "a" contains 4 characters and declares its length of 4, but the fourth character is supposed to be a null terminator. 
In this case, @@ -425,87 +425,87 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(14), ), TestCase( - "utf8/length-zero", - doc(elem("", Tag.UTF8, i32le(0), b"\0")), - "UTF-8 string length must always be at least 1 for the null terminator", + 'utf8/length-zero', + doc(elem('', Tag.UTF8, i32le(0), b'\0')), + 'UTF-8 string length must always be at least 1 for the null terminator', error=corruption_at(6), ), TestCase( - "utf8/length-too-short", - doc(elem("", Tag.UTF8, i32le(3), b"bar\0")), - "UTF-8 string is three chars and a null terminator, but the declared length is 3 (should be 4)", + 'utf8/length-too-short', + doc(elem('', Tag.UTF8, i32le(3), b'bar\0')), + 'UTF-8 string is three chars and a null terminator, but the declared length is 3 (should be 4)', error=corruption_at(12), ), TestCase( - "utf8/header-too-large", - doc(elem("foo", Tag.UTF8, b"\xff\xff\xff\xffbar\0")), + 'utf8/header-too-large', + doc(elem('foo', Tag.UTF8, b'\xff\xff\xff\xffbar\0')), """ Data { "foo": "bar" } but the declared length of "bar" is way too large. """, error=corruption_at(9), ), TestCase( - "utf8/valid", - doc(elem("foo", Tag.UTF8, string("abcd"))), + 'utf8/valid', + doc(elem('foo', Tag.UTF8, string('abcd'))), """ Validate a valid UTF-8 string with UTF-8 validation enabled. """, flags=BSON_VALIDATE_UTF8, ), TestCase( - "utf8/invalid/accept", - doc(utf8elem("foo", b"abc\xffd")), + 'utf8/invalid/accept', + doc(utf8elem('foo', b'abc\xffd')), """ Validate an invalid UTF-8 string, but accept invalid UTF-8. """, ), TestCase( - "utf8/invalid/reject", - doc(utf8elem("foo", b"abc\xffd")), + 'utf8/invalid/reject', + doc(utf8elem('foo', b'abc\xffd')), """ Validate an invalid UTF-8 string, and expect rejection. 
""", flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4), ), TestCase( - "utf8/valid-with-null/accept-1", - doc(utf8elem("foo", b"abc\x00123")), + 'utf8/valid-with-null/accept-1', + doc(utf8elem('foo', b'abc\x00123')), """ This is a valid UTF-8 string that contains a null character. We accept it because we don't do UTF-8 validation. """, ), TestCase( - "utf8/valid-with-null/accept-2", - doc(utf8elem("foo", b"abc\x00123")), + 'utf8/valid-with-null/accept-2', + doc(utf8elem('foo', b'abc\x00123')), """ This is a valid UTF-8 string that contains a null character. We allow it explicitly when we request UTF-8 validation. """, - flags=f"{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}", + flags=f'{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}', ), TestCase( - "utf8/valid-with-null/reject", - doc(utf8elem("foo", b"abc\x00123")), + 'utf8/valid-with-null/reject', + doc(utf8elem('foo', b'abc\x00123')), """ This is a valid UTF-8 string that contains a null character. We reject this because we don't pass BSON_VALIDATE_UTF8_ALLOW_NULL. """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, "UTF-8 string contains a U+0000 (null) character", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, 'UTF-8 string contains a U+0000 (null) character', 4), ), TestCase( - "utf8/overlong-null/accept-1", - doc(utf8elem("foo", b"abc\xc0\x80123")), + 'utf8/overlong-null/accept-1', + doc(utf8elem('foo', b'abc\xc0\x80123')), """ This is an *invalid* UTF-8 string, and contains an overlong null. We should accept it because we aren't doing UTF-8 validation. """, ), TestCase( - "utf8/overlong-null/accept-2", - doc(utf8elem("foo", b"abc\xc0\x80123")), + 'utf8/overlong-null/accept-2', + doc(utf8elem('foo', b'abc\xc0\x80123')), """ ! 
NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence @@ -517,11 +517,11 @@ def disallowed_key(char: str, k: str) -> str: If/when UTF-8 validation is changed to reject overlong null, then this test should change to expect rejection the invalid UTF-8. """, - flags=f"{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}", + flags=f'{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}', ), TestCase( - "utf8/overlong-null/reject", - doc(utf8elem("foo", b"abc\xc0\x80123")), + 'utf8/overlong-null/reject', + doc(utf8elem('foo', b'abc\xc0\x80123')), """ ! NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence @@ -534,29 +534,29 @@ def disallowed_key(char: str, k: str) -> str: expected error code and error message for this test should change. """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, "UTF-8 string contains a U+0000 (null) character", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, 'UTF-8 string contains a U+0000 (null) character', 4), ), TestCase( - "utf8-key/invalid/accept", - doc(utf8elem(b"abc\xffdef", "bar")), + 'utf8-key/invalid/accept', + doc(utf8elem(b'abc\xffdef', 'bar')), """ The element key is not valid UTf-8, but we accept it if we don't do UTF-8 validation. """, ), TestCase( - "utf8-key/invalid/reject", - doc(utf8elem(b"abc\xffdef", "bar")), + 'utf8-key/invalid/reject', + doc(utf8elem(b'abc\xffdef', 'bar')), """ The element key is not valid UTF-8, and we reject it when we requested UTF-8 validation. """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4), ), TestCase( - "utf8-key/overlong-null/reject", - doc(utf8elem(b"abc\xc0\x80def", "bar")), + 'utf8-key/overlong-null/reject', + doc(utf8elem(b'abc\xc0\x80def', 'bar')), """ ! 
NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence @@ -568,11 +568,11 @@ def disallowed_key(char: str, k: str) -> str: expected error code and error message for this test should change. """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, "UTF-8 string contains a U+0000 (null) character", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, 'UTF-8 string contains a U+0000 (null) character', 4), ), TestCase( - "utf8-key/overlong-null/accept", - doc(utf8elem(b"abc\xc0\x80def", "bar")), + 'utf8-key/overlong-null/accept', + doc(utf8elem(b'abc\xc0\x80def', 'bar')), """ ! NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence @@ -583,54 +583,54 @@ def disallowed_key(char: str, k: str) -> str: If/when UTF-8 validation is changed to reject overlong null, then this test case should instead reject the key string as invalid UTF-8. """, - flags=f"{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}", + flags=f'{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}', ), TestCase( - "array/empty", - doc(elem("array", Tag.Array, doc())), - "Simple empty array element", + 'array/empty', + doc(elem('array', Tag.Array, doc())), + 'Simple empty array element', ), TestCase( - "array/simple", + 'array/simple', doc( elem( - "array", + 'array', Tag.Array, doc( - elem("0", Tag.Int32, i32le(42)), - elem("1", Tag.Int32, i32le(1729)), - elem("2", Tag.Int32, i32le(-8)), + elem('0', Tag.Int32, i32le(42)), + elem('1', Tag.Int32, i32le(1729)), + elem('2', Tag.Int32, i32le(-8)), ), ) ), - "Simple array element of integers", + 'Simple array element of integers', ), TestCase( - "array/invalid-element", + 'array/invalid-element', doc( elem( - "array", + 'array', Tag.Array, doc( - elem("0", Tag.Int32, i32le(42)), - elem("1", Tag.Int32, i32le(1729)[-1:]), # Truncated - elem("2", Tag.Int32, i32le(-8)), + elem('0', Tag.Int32, i32le(42)), + elem('1', Tag.Int32, i32le(1729)[-1:]), # Truncated + 
elem('2', Tag.Int32, i32le(-8)), ), ) ), - "Simple array element of integers, but one element is truncated", + 'Simple array element of integers, but one element is truncated', error=corruption_at(34), ), TestCase( - "array/invalid-element-check-offset", + 'array/invalid-element-check-offset', doc( elem( - "array-shifted", + 'array-shifted', Tag.Array, doc( - elem("0", Tag.Int32, i32le(42)), - elem("1", Tag.Int32, i32le(1729)[-1:]), # Truncated - elem("2", Tag.Int32, i32le(-8)), + elem('0', Tag.Int32, i32le(42)), + elem('1', Tag.Int32, i32le(1729)[-1:]), # Truncated + elem('2', Tag.Int32, i32le(-8)), ), ) ), @@ -642,37 +642,37 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(42), ), TestCase( - "symbol/simple", - doc(elem("symbol", Tag.Symbol, string("void 0;"))), + 'symbol/simple', + doc(elem('symbol', Tag.Symbol, string('void 0;'))), """ A simple document: { symbol: Symbol("void 0;") } """, ), TestCase( - "symbol/invalid-utf8/accept", - doc(elem("symbol", Tag.Symbol, string(b"void\xff 0;"))), + 'symbol/invalid-utf8/accept', + doc(elem('symbol', Tag.Symbol, string(b'void\xff 0;'))), """ A simple symbol document, but the string contains invalid UTF-8 """, ), TestCase( - "symbol/invalid-utf8/reject", - doc(elem("symbol", Tag.Symbol, string(b"void\xff 0;"))), + 'symbol/invalid-utf8/reject', + doc(elem('symbol', Tag.Symbol, string(b'void\xff 0;'))), """ A simple symbol document, but the string contains invalid UTF-8 """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4), ), TestCase( - "symbol/length-zero", - doc(b"\x0e\0" + i32le(0) + b"\0"), - "Symbol string length must always be at least 1 for the null terminator", + 'symbol/length-zero', + doc(b'\x0e\0' + i32le(0) + b'\0'), + 'Symbol string length must always be at least 1 for the null terminator', error=corruption_at(6), ), TestCase( - "symbol/length-too-short", - 
doc(b"\x0e\0" + i32le(3) + b"bar\0"), + 'symbol/length-too-short', + doc(b'\x0e\0' + i32le(3) + b'bar\0'), """ Symbol string is three chars and a null terminator, but the declared length is 3 (should be 4) @@ -680,55 +680,55 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(12), ), TestCase( - "code/simple", - doc(elem("code", Tag.Code, string("void 0;"))), + 'code/simple', + doc(elem('code', Tag.Code, string('void 0;'))), """ A simple document: { code: Code("void 0;") } """, ), TestCase( - "code/invalid-utf8/accept", - doc(elem("code", Tag.Code, string(b"void\xff 0;"))), + 'code/invalid-utf8/accept', + doc(elem('code', Tag.Code, string(b'void\xff 0;'))), """ A simple code document, but the string contains invalid UTF-8 """, ), TestCase( - "code/invalid-utf8/reject", - doc(elem("code", Tag.Code, string(b"void\xff 0;"))), + 'code/invalid-utf8/reject', + doc(elem('code', Tag.Code, string(b'void\xff 0;'))), """ A simple code document, but the string contains invalid UTF-8 """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4), ), TestCase( - "code/length-zero", - doc(elem("code", Tag.Code, i32le(0), b"\0")), - "Code string length must always be at least 1 for the null terminator", + 'code/length-zero', + doc(elem('code', Tag.Code, i32le(0), b'\0')), + 'Code string length must always be at least 1 for the null terminator', error=corruption_at(10), ), TestCase( - "code/length-too-short", - doc(elem("code", Tag.Code, i32le(3), b"bar\0")), - "Code string is three chars and a null terminator, but the declared length is 3 (should be 4)", + 'code/length-too-short', + doc(elem('code', Tag.Code, i32le(3), b'bar\0')), + 'Code string is three chars and a null terminator, but the declared length is 3 (should be 4)', error=corruption_at(16), ), # Code w/ scope TestCase( - "code-with-scope/simple", - doc(elem("foo", Tag.CodeWithScope, 
code_with_scope("void 0;", doc()))), - "A simple valid code-with-scope element", + 'code-with-scope/simple', + doc(elem('foo', Tag.CodeWithScope, code_with_scope('void 0;', doc()))), + 'A simple valid code-with-scope element', ), TestCase( - "code-with-scope/invalid-code-length-zero", + 'code-with-scope/invalid-code-length-zero', doc( elem( - "", + '', Tag.CodeWithScope, i32le(10), - b"\0\0\0\0", # strlen - b"\0", # code + b'\0\0\0\0', # strlen + b'\0', # code doc(), # scope ) ), @@ -739,14 +739,14 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(6), ), TestCase( - "code-with-scope/invalid-code-length-too-large", + 'code-with-scope/invalid-code-length-too-large', doc( elem( - "", + '', Tag.CodeWithScope, i32le(10), - b"\xff\xff\xff\xff", # strlen (too big) - b"\0", + b'\xff\xff\xff\xff', # strlen (too big) + b'\0', doc(), # Scope ) ), @@ -756,21 +756,21 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(6), ), TestCase( - "code-with-scope/invalid-scope", - doc(elem("foo", Tag.CodeWithScope, code_with_scope("void 0;", doc()[:-1]))), - "A code-with-scope element, but the scope document is corrupted", + 'code-with-scope/invalid-scope', + doc(elem('foo', Tag.CodeWithScope, code_with_scope('void 0;', doc()[:-1]))), + 'A code-with-scope element, but the scope document is corrupted', error=corruption_at(13), ), TestCase( - "code-with-scope/empty-key-in-scope", + 'code-with-scope/empty-key-in-scope', doc( elem( - "code", + 'code', Tag.CodeWithScope, code_with_scope( - "void 0;", + 'void 0;', doc( - elem("obj", Tag.Document, doc(utf8elem("", "string"))), + elem('obj', Tag.Document, doc(utf8elem('', 'string'))), ), ), ) @@ -784,59 +784,59 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_EMPTY_KEYS, ), TestCase( - "code-with-scope/corrupt-scope", + 'code-with-scope/corrupt-scope', doc( elem( - "code", + 'code', Tag.CodeWithScope, code_with_scope( - "void 0;", + 'void 0;', doc( elem( - "foo", + 'foo', Tag.UTF8, 
i32le(0), # Invalid string length - b"\0", + b'\0', ) ), ), ) ), - "A code-with-scope element, but the scope contains corruption", + 'A code-with-scope element, but the scope contains corruption', error=ErrorInfo(BSON_VALIDATE_CORRUPT, 'Error in scope document for element "code": corrupt BSON', offset=13), ), TestCase( - "code-with-scope/corrupt-scope-2", + 'code-with-scope/corrupt-scope-2', doc( elem( - "code", + 'code', Tag.CodeWithScope, code_with_scope( - "void 0;", + 'void 0;', doc( elem( - "foo", + 'foo', Tag.UTF8, - b"\xff\xff\xff\xff", # Invalid string length - b"\0", + b'\xff\xff\xff\xff', # Invalid string length + b'\0', ) ), ), ) ), - "A code-with-scope element, but the scope contains corruption", + 'A code-with-scope element, but the scope contains corruption', error=ErrorInfo(BSON_VALIDATE_CORRUPT, 'Error in scope document for element "code": corrupt BSON', offset=13), ), TestCase( - "regex/simple", - doc(elem("regex", Tag.Regex, b"1234\0gi\0")), + 'regex/simple', + doc(elem('regex', Tag.Regex, b'1234\0gi\0')), """ Simple document: { regex: Regex("1234", "gi") } """, ), TestCase( - "regex/invalid-opts", - doc(elem("regex", Tag.Regex, b"foo\0bar")), + 'regex/invalid-opts', + doc(elem('regex', Tag.Regex, b'foo\0bar')), """ A regular expression element with missing null terminator. The main option string "foo" has a null terminator, but the option component "bar" @@ -847,8 +847,8 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(18), ), TestCase( - "regex/double-null", - doc(elem("regex", Tag.Regex, b"foo\0bar\0\0")), + 'regex/double-null', + doc(elem('regex', Tag.Regex, b'foo\0bar\0\0')), """ A regular expression element with an extra null terminator. 
Since regex is delimited by its null terminator, the iterator will stop early before @@ -857,24 +857,24 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(21), ), TestCase( - "regex/invalid-utf8/accept", - doc(elem("regex", Tag.Regex, b"foo\xffbar\0gi\0")), + 'regex/invalid-utf8/accept', + doc(elem('regex', Tag.Regex, b'foo\xffbar\0gi\0')), """ A regular expression that contains invalid UTF-8. """, ), TestCase( - "regex/invalid-utf8/reject", - doc(elem("regex", Tag.Regex, b"foo\xffbar\0gi\0")), + 'regex/invalid-utf8/reject', + doc(elem('regex', Tag.Regex, b'foo\xffbar\0gi\0')), """ A regular expression that contains invalid UTF-8. """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4), ), TestCase( - "regex/invalid-utf8/accept-if-absent", - doc(elem("regex", Tag.Regex, b"foo\0gi\0")), + 'regex/invalid-utf8/accept-if-absent', + doc(elem('regex', Tag.Regex, b'foo\0gi\0')), """ A regular valid UTf-8 regex. We check for invalid UTf-8, and accept becaues the regex is fine. 
@@ -882,14 +882,14 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_UTF8, ), TestCase( - "dbpointer/string-length-zero", + 'dbpointer/string-length-zero', doc( elem( - "foo", + 'foo', Tag.DBPointer, i32le(0), # String length (invalid) - b"\0", # Empty string - b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99", # OID + b'\0', # Empty string + b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99', # OID ) ), """ @@ -899,14 +899,14 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(9), ), TestCase( - "dbpointer/string-length-too-big", + 'dbpointer/string-length-too-big', doc( elem( - "foo", + 'foo', Tag.DBPointer, - b"\xff\xff\xff\xff", # String length (invalid) - b"foobar\0", # Simple string - b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99", # OID + b'\xff\xff\xff\xff', # String length (invalid) + b'foobar\0', # Simple string + b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99', # OID ) ), """ @@ -916,17 +916,17 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(9), ), TestCase( - "dbpointer/truncated", + 'dbpointer/truncated', doc( - utf8elem("a", "b"), + utf8elem('a', 'b'), elem( - "foo", + 'foo', Tag.DBPointer, i32le(7), # 7 bytes, bleeding into the null terminator - b"foobar", # Simple string, missing a null terminator. - b"\x00\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99", # OID + b'foobar', # Simple string, missing a null terminator. 
+ b'\x00\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99', # OID ), - utf8elem("a", "b"), + utf8elem('a', 'b'), ), """ Document { "foo": DBPointer("foobar", ) }, but the length header on @@ -936,14 +936,14 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(43), ), TestCase( - "dbpointer/missing-null", + 'dbpointer/missing-null', doc( elem( - "foo", + 'foo', Tag.DBPointer, i32le(4), - b"abcd", # Missing null terminator - b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99", # OID + b'abcd', # Missing null terminator + b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99', # OID ) ), """ @@ -953,13 +953,13 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(16), ), TestCase( - "dbpointer/invalid-utf8/accept", + 'dbpointer/invalid-utf8/accept', doc( elem( - "foo", + 'foo', Tag.DBPointer, - string(b"abc\xffdef"), # String with invalid UTF-8 - b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99", # OID + string(b'abc\xffdef'), # String with invalid UTF-8 + b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99', # OID ) ), """ @@ -967,29 +967,29 @@ def disallowed_key(char: str, k: str) -> str: """, ), TestCase( - "dbpointer/invalid-utf8/reject", + 'dbpointer/invalid-utf8/reject', doc( elem( - "foo", + 'foo', Tag.DBPointer, - string(b"abc\xffdef"), # String with invalid UTF-8 - b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99", # OID + string(b'abc\xffdef'), # String with invalid UTF-8 + b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99', # OID ) ), """ DBPointer document, but the collection string contains invalid UTF-8 """, flags=BSON_VALIDATE_UTF8, - error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4), + error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4), ), TestCase( - "dbpointer/invalid-utf8/accept-if-absent", + 'dbpointer/invalid-utf8/accept-if-absent', doc( elem( - "foo", + 'foo', Tag.DBPointer, - string(b"abcdef"), # Valid string - b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99", # OID + 
string(b'abcdef'), # Valid string + b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99', # OID ) ), """ @@ -999,15 +999,15 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_UTF8, ), TestCase( - "subdoc/simple", - doc(elem("doc", Tag.Document, doc(utf8elem("foo", "bar")))), + 'subdoc/simple', + doc(elem('doc', Tag.Document, doc(utf8elem('foo', 'bar')))), """ A simple document: { doc: { foo: "bar" } } """, ), TestCase( - "subdoc/invalid-shared-null", - doc(elem("doc", Tag.Document, doc()[:-1])), + 'subdoc/invalid-shared-null', + doc(elem('doc', Tag.Document, doc()[:-1])), """ A truncated subdocument element, with its null terminator accidentally overlapping the parent document's null. @@ -1015,8 +1015,8 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(9), ), TestCase( - "subdoc/overlapping-utf8-null", - doc(elem("doc", Tag.Document, doc(utf8elem("bar", "baz\0")[:-1]))), + 'subdoc/overlapping-utf8-null', + doc(elem('doc', Tag.Document, doc(utf8elem('bar', 'baz\0')[:-1]))), """ Encodes the document: @@ -1028,18 +1028,18 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(18), ), TestCase( - "subdoc/invalid-element", - doc(elem("doc", Tag.Document, doc(elem("dbl", Tag.Double, b"abcd")))), - "A subdocument that contains an invalid element", + 'subdoc/invalid-element', + doc(elem('doc', Tag.Document, doc(elem('dbl', Tag.Double, b'abcd')))), + 'A subdocument that contains an invalid element', error=corruption_at(18), ), TestCase( - "subdoc/header-too-large", + 'subdoc/header-too-large', doc( elem( - "foo", + 'foo', Tag.Document, - b"\xf7\xff\xff\xff\0", # Bad document + b'\xf7\xff\xff\xff\0', # Bad document ), ), """ @@ -1048,12 +1048,12 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(9), ), TestCase( - "subdoc/header-too-small", + 'subdoc/header-too-small', doc( elem( - "test", + 'test', Tag.Document, - b"\x04\0\0\0", # Only four bytes. 
All docs must be at least 5 + b'\x04\0\0\0', # Only four bytes. All docs must be at least 5 ), ), """ @@ -1062,12 +1062,12 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(4), ), TestCase( - "subdoc/impossible-size", + 'subdoc/impossible-size', doc( elem( - "foo", + 'foo', Tag.Document, - b"\xff\xff\xff\xff\0", # Bad document + b'\xff\xff\xff\xff\0', # Bad document ), ), """ @@ -1077,36 +1077,36 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(9), ), TestCase( - "null/simple", - doc(elem("null", Tag.Null)), + 'null/simple', + doc(elem('null', Tag.Null)), """ A simple document: { "null": null } """, ), TestCase( - "undefined/simple", - doc(elem("undefined", Tag.Undefined)), + 'undefined/simple', + doc(elem('undefined', Tag.Undefined)), """ A simple document: { "undefined": undefined } """, ), TestCase( - "binary/simple", - doc(elem("binary", Tag.Binary, binary(0x80, b"12345"))), + 'binary/simple', + doc(elem('binary', Tag.Binary, binary(0x80, b'12345'))), """ Simple binary data { "binary": Binary(0x80, b'12345') } """, ), TestCase( - "binary/bad-length-zero-subtype-2", + 'binary/bad-length-zero-subtype-2', doc( elem( - "binary", + 'binary', Tag.Binary, i32le(0), # Invalid: Zero length - b"\x02", # subtype two + b'\x02', # subtype two i32le(4), # Length of 4 - b"1234", # payload + b'1234', # payload ), ), """ @@ -1116,15 +1116,15 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(12), ), TestCase( - "binary/bad-inner-length-on-subtype-2", + 'binary/bad-inner-length-on-subtype-2', doc( elem( - "binary", + 'binary', Tag.Binary, i32le(8), # Valid length - b"\x02", # subtype two + b'\x02', # subtype two i32le(2), # Invalid length of (should be 4) - b"1234", # payload + b'1234', # payload ), ), """ @@ -1134,14 +1134,14 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(17), ), TestCase( - "binary/bad-length-too-small", + 'binary/bad-length-too-small', doc( elem( - "binary", + 'binary', Tag.Binary, 
i32le(2), # Length prefix (too small) - b"\x80", # subtype - b"1234", # payload + b'\x80', # subtype + b'1234', # payload ), ), """ @@ -1155,14 +1155,14 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(22), ), TestCase( - "binary/bad-length-too-big", + 'binary/bad-length-too-big', doc( elem( - "binary", + 'binary', Tag.Binary, - b"\xf3\xff\xff\xff", # Length prefix (too big) - b"\x80", # subtype - b"1234", # data + b'\xf3\xff\xff\xff', # Length prefix (too big) + b'\x80', # subtype + b'1234', # data ), ), """ @@ -1172,15 +1172,15 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(12), ), TestCase( - "binary/old-invalid/1", + 'binary/old-invalid/1', doc( elem( - "binary", + 'binary', Tag.Binary, binary( 2, i32le(5), # Bad length prefix: Should be 4 - b"abcd", + b'abcd', ), ), ), @@ -1191,14 +1191,14 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(17), ), TestCase( - "binary/old-invalid/2", + 'binary/old-invalid/2', doc( elem( - "bin", + 'bin', Tag.Binary, binary( 2, - b"abc", # Bad: Subtype 2 requires at least four bytes + b'abc', # Bad: Subtype 2 requires at least four bytes ), ) ), @@ -1209,95 +1209,95 @@ def disallowed_key(char: str, k: str) -> str: error=corruption_at(9), ), TestCase( - "minkey/simple", - doc(elem("min", Tag.MinKey)), - "A simple document with a MinKey element", + 'minkey/simple', + doc(elem('min', Tag.MinKey)), + 'A simple document with a MinKey element', ), TestCase( - "maxkey/simple", - doc(elem("max", Tag.MaxKey)), - "A simple document with a MaxKey element", + 'maxkey/simple', + doc(elem('max', Tag.MaxKey)), + 'A simple document with a MaxKey element', ), TestCase( - "int32/simple", - doc(elem("int32", Tag.Int32, i32le(42))), - "A simple document with a valid single int32 element", + 'int32/simple', + doc(elem('int32', Tag.Int32, i32le(42))), + 'A simple document with a valid single int32 element', ), TestCase( - "int32/truncated", - doc(elem("int32-truncated", Tag.Int32, 
i32le(42)[:-1])), - "Truncated 32-bit integer", + 'int32/truncated', + doc(elem('int32-truncated', Tag.Int32, i32le(42)[:-1])), + 'Truncated 32-bit integer', error=corruption_at(21), ), - TestCase("timestamp/simple", doc(elem("timestamp", Tag.Timestamp, i64le(1729))), """A simple timestamp element"""), + TestCase('timestamp/simple', doc(elem('timestamp', Tag.Timestamp, i64le(1729))), """A simple timestamp element"""), TestCase( - "timestamp/truncated", - doc(elem("timestamp", Tag.Timestamp, i64le(1729)[:-1])), + 'timestamp/truncated', + doc(elem('timestamp', Tag.Timestamp, i64le(1729)[:-1])), """A truncated timestamp element""", error=corruption_at(15), ), TestCase( - "int64/simple", - doc(elem("int64", Tag.Int64, i64le(1729))), - "A simple document with a valid single int64 element", + 'int64/simple', + doc(elem('int64', Tag.Int64, i64le(1729))), + 'A simple document with a valid single int64 element', ), TestCase( - "int64/truncated", - doc(elem("int64-truncated", Tag.Int64, i64le(1729)[:-1])), - "Truncated 64-bit integer", + 'int64/truncated', + doc(elem('int64-truncated', Tag.Int64, i64le(1729)[:-1])), + 'Truncated 64-bit integer', error=corruption_at(21), ), TestCase( - "double/simple", - doc(elem("double", Tag.Double, f64le(3.14))), - "Simple float64 element", + 'double/simple', + doc(elem('double', Tag.Double, f64le(3.14))), + 'Simple float64 element', ), TestCase( - "double/truncated", - doc(elem("double-truncated", Tag.Double, f64le(3.13)[:-1])), - "Truncated 64-bit float", + 'double/truncated', + doc(elem('double-truncated', Tag.Double, f64le(3.13)[:-1])), + 'Truncated 64-bit float', error=corruption_at(22), ), TestCase( - "boolean/simple-false", - doc(elem("bool", Tag.Boolean, b"\x00")), + 'boolean/simple-false', + doc(elem('bool', Tag.Boolean, b'\x00')), """A simple boolean 'false'""", ), TestCase( - "boolean/simple-true", - doc(elem("bool", Tag.Boolean, b"\x01")), + 'boolean/simple-true', + doc(elem('bool', Tag.Boolean, b'\x01')), """A simple boolean 
'true'""", ), TestCase( - "boolean/invalid", - doc(elem("bool", Tag.Boolean, b"\xc3")), + 'boolean/invalid', + doc(elem('bool', Tag.Boolean, b'\xc3')), """ An invalid boolean octet. Must be '0' or '1', but is 0xc3. """, error=corruption_at(10), ), TestCase( - "datetime/simple", - doc(elem("utc", Tag.Datetime, b"\x0b\x98\x8c\x2b\x33\x01\x00\x00")), - "Simple datetime element", + 'datetime/simple', + doc(elem('utc', Tag.Datetime, b'\x0b\x98\x8c\x2b\x33\x01\x00\x00')), + 'Simple datetime element', ), TestCase( - "datetime/truncated", - doc(elem("utc", Tag.Datetime, b"\x0b\x98\x8c\x2b\x33\x01\x00")), - "Truncated datetime element", + 'datetime/truncated', + doc(elem('utc', Tag.Datetime, b'\x0b\x98\x8c\x2b\x33\x01\x00')), + 'Truncated datetime element', error=corruption_at(9), ), # DBRef TestCase( - "dbref/missing-id", - doc(utf8elem("$ref", "foo")), + 'dbref/missing-id', + doc(utf8elem('$ref', 'foo')), """This dbref document is missing an $id element""", flags=BSON_VALIDATE_DOLLAR_KEYS, error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18), ), TestCase( - "dbref/non-id", - doc(utf8elem("$ref", "foo"), utf8elem("bar", "baz")), + 'dbref/non-id', + doc(utf8elem('$ref', 'foo'), utf8elem('bar', 'baz')), """ The 'bar' element should be an '$id' element. """, @@ -1305,17 +1305,17 @@ def disallowed_key(char: str, k: str) -> str: error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18), ), TestCase( - "dbref/not-first-elements", - doc(utf8elem("foo", "bar"), utf8elem("$ref", "a"), utf8elem("$id", "b")), + 'dbref/not-first-elements', + doc(utf8elem('foo', 'bar'), utf8elem('$ref', 'a'), utf8elem('$id', 'b')), """ This would be a valid DBRef, but the "$ref" key must come first. 
""", flags=BSON_VALIDATE_DOLLAR_KEYS, - error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$ref"), 17), + error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$ref'), 17), ), TestCase( - "dbref/ref-without-id-with-db", - doc(utf8elem("$ref", "foo"), utf8elem("$db", "bar")), + 'dbref/ref-without-id-with-db', + doc(utf8elem('$ref', 'foo'), utf8elem('$db', 'bar')), """ There should be an $id element, but we skip straight to $db """, @@ -1323,48 +1323,48 @@ def disallowed_key(char: str, k: str) -> str: error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18), ), TestCase( - "dbref/non-string-ref", - doc(elem("$ref", Tag.Int32, i32le(42))), + 'dbref/non-string-ref', + doc(elem('$ref', Tag.Int32, i32le(42))), """ The $ref element must be a string, but is an integer. """, flags=BSON_VALIDATE_DOLLAR_KEYS, - error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, "$ref element must be a UTF-8 element", 4), + error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, '$ref element must be a UTF-8 element', 4), ), TestCase( - "dbref/non-string-db", + 'dbref/non-string-db', doc( - utf8elem("$ref", "foo"), - utf8elem("$id", "bar"), - elem("$db", Tag.Int32, i32le(42)), + utf8elem('$ref', 'foo'), + utf8elem('$id', 'bar'), + elem('$db', Tag.Int32, i32le(42)), ), """ The $db element should be a string, but is an integer. """, flags=BSON_VALIDATE_DOLLAR_KEYS, - error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, "$db element in DBRef must be a UTF-8 element", 31), + error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, '$db element in DBRef must be a UTF-8 element', 31), ), TestCase( - "dbref/invalid-extras-between", + 'dbref/invalid-extras-between', doc( - utf8elem("$ref", "foo"), - utf8elem("$id", "bar"), - utf8elem("extra", "field"), - utf8elem("$db", "baz"), + utf8elem('$ref', 'foo'), + utf8elem('$id', 'bar'), + utf8elem('extra', 'field'), + utf8elem('$db', 'baz'), ), """ Almost a valid DBRef, but there is an extra field before $db. We reject $db as an invalid key. 
""", flags=BSON_VALIDATE_DOLLAR_KEYS, - error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$db"), 48), + error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$db'), 48), ), TestCase( - "dbref/invalid-double-ref", + 'dbref/invalid-double-ref', doc( - utf8elem("$ref", "foo"), - utf8elem("$ref", "bar"), - utf8elem("$id", "baz"), + utf8elem('$ref', 'foo'), + utf8elem('$ref', 'bar'), + utf8elem('$id', 'baz'), ), """ Invalid DBRef contains a second $ref element. @@ -1373,25 +1373,25 @@ def disallowed_key(char: str, k: str) -> str: error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18), ), TestCase( - "dbref/invalid-missing-ref", - doc(utf8elem("$id", "foo")), + 'dbref/invalid-missing-ref', + doc(utf8elem('$id', 'foo')), """ DBRef document requires a $ref key to be first. """, flags=BSON_VALIDATE_DOLLAR_KEYS, - error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$id"), 4), + error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$id'), 4), ), TestCase( - "dbref/valid/simple", - doc(utf8elem("$ref", "foo"), utf8elem("$id", "bar")), + 'dbref/valid/simple', + doc(utf8elem('$ref', 'foo'), utf8elem('$id', 'bar')), """ This is a simple valid DBRef element. 
""", flags=BSON_VALIDATE_DOLLAR_KEYS, ), TestCase( - "dbref/valid/simple-with-db", - doc(utf8elem("$ref", "foo"), utf8elem("$id", "bar"), utf8elem("$db", "baz")), + 'dbref/valid/simple-with-db', + doc(utf8elem('$ref', 'foo'), utf8elem('$id', 'bar'), utf8elem('$db', 'baz')), """ A simple DBRef of the form: @@ -1400,19 +1400,19 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_DOLLAR_KEYS, ), TestCase( - "dbref/valid/nested-id-doc", + 'dbref/valid/nested-id-doc', doc( - utf8elem("$ref", "foo"), + utf8elem('$ref', 'foo'), elem( - "$id", + '$id', Tag.Document, doc( - utf8elem("$ref", "foo2"), - utf8elem("$id", "bar2"), - utf8elem("$db", "baz2"), + utf8elem('$ref', 'foo2'), + utf8elem('$id', 'bar2'), + utf8elem('$db', 'baz2'), ), ), - utf8elem("$db", "baz"), + utf8elem('$db', 'baz'), ), """ This is a valid DBRef of the form: @@ -1422,12 +1422,12 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_DOLLAR_KEYS, ), TestCase( - "dbref/valid/trailing-content", + 'dbref/valid/trailing-content', doc( - utf8elem("$ref", "foo"), - utf8elem("$id", "bar"), - utf8elem("$db", "baz"), - utf8elem("extra", "field"), + utf8elem('$ref', 'foo'), + utf8elem('$id', 'bar'), + utf8elem('$db', 'baz'), + utf8elem('extra', 'field'), ), """ A valid DBRef of the form: @@ -1442,11 +1442,11 @@ def disallowed_key(char: str, k: str) -> str: flags=BSON_VALIDATE_DOLLAR_KEYS, ), TestCase( - "dbref/valid/trailing-content-no-db", + 'dbref/valid/trailing-content-no-db', doc( - utf8elem("$ref", "foo"), - utf8elem("$id", "bar"), - utf8elem("extra", "field"), + utf8elem('$ref', 'foo'), + utf8elem('$id', 'bar'), + utf8elem('extra', 'field'), ), """ A valid DBRef of the form: @@ -1461,7 +1461,7 @@ def disallowed_key(char: str, k: str) -> str: ), ] -if __name__ == "__main__": +if __name__ == '__main__': # We don't take an arguments, but error if any are given parser = argparse.ArgumentParser(description=__doc__) parser.parse_args() @@ -1471,11 +1471,11 @@ def 
disallowed_key(char: str, k: str) -> str: for c in CASES: print() for part in generate(c): - print(part, end="") + print(part, end='') # Print the registration function - print(f"\n{GENERATED_NOTE}") - print("void test_install_generated_bson_validation(TestSuite* suite) {") + print(f'\n{GENERATED_NOTE}') + print('void test_install_generated_bson_validation(TestSuite* suite) {') for c in CASES: print(f' TestSuite_Add(suite, "/bson/validate/" {json.dumps(c.name)}, {c.fn_name});') - print("}") + print('}') diff --git a/src/libmongoc/doc/cmakerefdomain.py b/src/libmongoc/doc/cmakerefdomain.py index ef9bf2ba4eb..151b503bc1a 100644 --- a/src/libmongoc/doc/cmakerefdomain.py +++ b/src/libmongoc/doc/cmakerefdomain.py @@ -8,38 +8,40 @@ replaced by a more full-featured extension. """ + from typing import Any, List + from sphinx.application import Sphinx -from sphinx.roles import XRefRole from sphinx.domains import Domain, ObjType +from sphinx.roles import XRefRole kinds = [ - "command", - "cpack_gen", - "envvar", - "generator", - "genex", - "guide", - "variable", - "module", - "policy", - "prop_cache", - "prop_dir", - "prop_gbl", - "prop_inst", - "prop_sf", - "prop_test", - "prop_tgt", - "manual", + 'command', + 'cpack_gen', + 'envvar', + 'generator', + 'genex', + 'guide', + 'variable', + 'module', + 'policy', + 'prop_cache', + 'prop_dir', + 'prop_gbl', + 'prop_inst', + 'prop_sf', + 'prop_test', + 'prop_tgt', + 'manual', ] class CMakeRefDomain(Domain): - name = "cmake" - label = "CMake (Minimal)" + name = 'cmake' + label = 'CMake (Minimal)' object_types = {k: ObjType(k, k) for k in kinds} roles = {k: XRefRole() for k in kinds} - roles["command"] = XRefRole(fix_parens=True) + roles['command'] = XRefRole(fix_parens=True) directives = {} initial_data: Any = {} @@ -51,6 +53,6 @@ def merge_domaindata(self, docnames: List[str], otherdata: Any) -> None: def setup(app: Sphinx): app.add_domain(CMakeRefDomain) return { - "parallel_read_safe": True, - "parallel_write_safe": True, + 
'parallel_read_safe': True, + 'parallel_write_safe': True, } diff --git a/src/libmongoc/doc/conf.py b/src/libmongoc/doc/conf.py index 072fa2d7308..7ac74de70d0 100644 --- a/src/libmongoc/doc/conf.py +++ b/src/libmongoc/doc/conf.py @@ -12,7 +12,7 @@ # Try importing from older Sphinx version path. from sphinx.builders.html import DirectoryHTMLBuilder -from docutils.parsers.rst import directives, Directive +from docutils.parsers.rst import Directive, directives from sphinx.application import Sphinx from sphinx.application import logger as sphinx_log from sphinx.config import Config @@ -23,6 +23,7 @@ # sphinx-design is not required for building man pages. # python-sphinx-design is not currently available on EPEL. The package for EPEL includes man pages. from sphinx_design.dropdown import DropdownDirective + has_sphinx_design = True except ImportError: pass @@ -30,40 +31,40 @@ # Ensure we can import "mongoc" extension module. this_path = os.path.dirname(__file__) sys.path.append(this_path) -sys.path.append(os.path.normpath(os.path.join(this_path, "../../../build/sphinx"))) +sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) from mongoc_common import * extensions = [ - "mongoc", - "sphinx.ext.intersphinx", + 'mongoc', + 'sphinx.ext.intersphinx', # NOTE: We use our own "minimal" CMake domain that lets us refer to external # objects from the CMake inventory, but provides no other features. The # build *could* otherwise use sphinxcontrib-moderncmakedomain, which is # more full-featured, but it is not (currently) available in repositories for # package building. # "sphinxcontrib.moderncmakedomain", - "cmakerefdomain", - "sphinx.ext.mathjax", + 'cmakerefdomain', + 'sphinx.ext.mathjax', ] if has_sphinx_design: - extensions.append("sphinx_design") + extensions.append('sphinx_design') # General information about the project. -project = "libmongoc" -copyright = "2009-present, MongoDB, Inc." 
-author = "MongoDB, Inc" +project = 'libmongoc' +copyright = '2009-present, MongoDB, Inc.' +author = 'MongoDB, Inc' -version_path = os.path.join(os.path.dirname(__file__), "../../..", "VERSION_CURRENT") +version_path = os.path.join(os.path.dirname(__file__), '../../..', 'VERSION_CURRENT') version = open(version_path).read().strip() # The extension requires the "base" to contain '%s' exactly once, but we never intend to use it though -language = "en" -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] -master_doc = "index" -html_static_path = ["static"] +language = 'en' +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +master_doc = 'index' +html_static_path = ['static'] # Set an empty list of disabled reftypes. # Sphinx 5.0 disables "std:doc" by default. @@ -73,13 +74,13 @@ intersphinx_timeout = 30 intersphinx_mapping = { - "sphinx": ("https://www.sphinx-doc.org/en/master", "includes/sphinx.inv"), - "python": ("https://docs.python.org/3", "includes/python.inv"), - "bson": ("https://www.mongoc.org/libbson/current", "includes/libbson.inv"), - "cmake": ("https://cmake.org/cmake/help/latest", "includes/cmake.inv"), + 'sphinx': ('https://www.sphinx-doc.org/en/master', 'includes/sphinx.inv'), + 'python': ('https://docs.python.org/3', 'includes/python.inv'), + 'bson': ('https://www.mongoc.org/libbson/current', 'includes/libbson.inv'), + 'cmake': ('https://cmake.org/cmake/help/latest', 'includes/cmake.inv'), } -_UPDATE_KEY = "update_external_inventories" +_UPDATE_KEY = 'update_external_inventories' def _maybe_update_inventories(app: Sphinx): @@ -91,30 +92,30 @@ def _maybe_update_inventories(app: Sphinx): This function will only have an effect if the appropriate command-line config value is defined. """ - prefix = "[libmongoc/doc/conf.py]" + prefix = '[libmongoc/doc/conf.py]' config = app.config if not config[_UPDATE_KEY]: sphinx_log.info( - "%s Using existing intersphinx inventories. 
Refresh by running with ‘-D %s=1’", + '%s Using existing intersphinx inventories. Refresh by running with ‘-D %s=1’', prefix, _UPDATE_KEY, ) return for name, tup in intersphinx_mapping.items(): urlbase, filename = tup - url = f"{urlbase}/objects.inv" - sphinx_log.info("%s Downloading external inventory for %s from [%s]", prefix, name, url) + url = f'{urlbase}/objects.inv' + sphinx_log.info('%s Downloading external inventory for %s from [%s]', prefix, name, url) with urllib.request.urlopen(url) as req: req: http.client.HTTPResponse = req dest = Path(app.srcdir) / filename - sphinx_log.info("%s Saving inventory [%s] to file [%s]", prefix, url, dest) - with dest.open("wb") as out: + sphinx_log.info('%s Saving inventory [%s] to file [%s]', prefix, url, dest) + with dest.open('wb') as out: buf = req.read(1024 * 4) while buf: out.write(buf) buf = req.read(1024 * 4) sphinx_log.info( - "%s Inventory file [%s] was updated. Commit the result to save it for subsequent builds.", + '%s Inventory file [%s] was updated. 
Commit the result to save it for subsequent builds.', prefix, dest, ) @@ -122,8 +123,8 @@ def _maybe_update_inventories(app: Sphinx): # -- Options for HTML output ---------------------------------------------- -html_theme = "furo" -html_title = html_shorttitle = "libmongoc %s" % version +html_theme = 'furo' +html_title = html_shorttitle = 'libmongoc %s' % version # html_favicon = None html_use_index = True @@ -232,50 +233,60 @@ def _maybe_update_inventories(app: Sphinx): def add_canonical_link(app: Sphinx, pagename: str, templatename: str, context: Dict[str, Any], doctree: Any): link = f'' - context["metatags"] = context.get("metatags", "") + link + context['metatags'] = context.get('metatags', '') + link if has_sphinx_design: + class AdDropdown(DropdownDirective): """A sphinx-design dropdown that can also be an admonition.""" - option_spec = DropdownDirective.option_spec | {"admonition": directives.unchanged_required} + option_spec = DropdownDirective.option_spec | {'admonition': directives.unchanged_required} def run(self): - adm = self.options.get("admonition") + adm = self.options.get('admonition') if adm is not None: - self.options.setdefault("class-container", []).extend(("admonition", adm)) - self.options.setdefault("class-title", []).append(f"admonition-title") + self.options.setdefault('class-container', []).extend(('admonition', adm)) + self.options.setdefault('class-title', []).append(f'admonition-title') return super().run() else: + class EmptyDirective(Directive): has_content = True + def run(self): return [] - + + has_add_css_file = True - -def check_html_builder_requirements (app): + + +def check_html_builder_requirements(app): if isinstance(app.builder, DirectoryHTMLBuilder): if not has_sphinx_design: - raise RuntimeError("The sphinx-design package is required to build HTML documentation but was not detected. Install sphinx-design.") + raise RuntimeError( + 'The sphinx-design package is required to build HTML documentation but was not detected. 
Install sphinx-design.' + ) if not has_add_css_file: - raise RuntimeError("A newer version of Sphinx is required to build HTML documentation with CSS files. Upgrade Sphinx to v3.5.0 or newer") + raise RuntimeError( + 'A newer version of Sphinx is required to build HTML documentation with CSS files. Upgrade Sphinx to v3.5.0 or newer' + ) + def setup(app: Sphinx): mongoc_common_setup(app) - app.connect("builder-inited", check_html_builder_requirements) + app.connect('builder-inited', check_html_builder_requirements) if has_sphinx_design: - app.add_directive("ad-dropdown", AdDropdown) + app.add_directive('ad-dropdown', AdDropdown) else: - app.add_directive("ad-dropdown", EmptyDirective) - app.add_directive("tab-set", EmptyDirective) - app.connect("html-page-context", add_canonical_link) - if hasattr(app, "add_css_file"): - app.add_css_file("styles.css") + app.add_directive('ad-dropdown', EmptyDirective) + app.add_directive('tab-set', EmptyDirective) + app.connect('html-page-context', add_canonical_link) + if hasattr(app, 'add_css_file'): + app.add_css_file('styles.css') else: global has_add_css_file has_add_css_file = False - - app.connect("builder-inited", _maybe_update_inventories) + + app.connect('builder-inited', _maybe_update_inventories) app.add_config_value(_UPDATE_KEY, default=False, rebuild=True, types=[bool]) diff --git a/src/libmongoc/examples/parse_handshake_cfg.py b/src/libmongoc/examples/parse_handshake_cfg.py index cb55dd44e0c..77bf5123211 100644 --- a/src/libmongoc/examples/parse_handshake_cfg.py +++ b/src/libmongoc/examples/parse_handshake_cfg.py @@ -3,61 +3,63 @@ # `MD_FLAGS` maps the flag to its bit position. 
# The bit positions must match those defined in src/mongoc/mongoc-handshake-private.h MD_FLAGS = { - "MONGOC_MD_FLAG_ENABLE_CRYPTO": 0, - "MONGOC_MD_FLAG_ENABLE_CRYPTO_CNG": 1, - "MONGOC_MD_FLAG_ENABLE_CRYPTO_COMMON_CRYPTO": 2, - "MONGOC_MD_FLAG_ENABLE_CRYPTO_LIBCRYPTO": 3, - "MONGOC_MD_FLAG_ENABLE_CRYPTO_SYSTEM_PROFILE": 4, - "MONGOC_MD_FLAG_ENABLE_SASL": 5, - "MONGOC_MD_FLAG_ENABLE_SSL": 6, - "MONGOC_MD_FLAG_ENABLE_SSL_OPENSSL": 7, - "MONGOC_MD_FLAG_ENABLE_SSL_SECURE_CHANNEL": 8, - "MONGOC_MD_FLAG_ENABLE_SSL_SECURE_TRANSPORT": 9, - "MONGOC_MD_FLAG_EXPERIMENTAL_FEATURES": 10, - "MONGOC_MD_FLAG_HAVE_SASL_CLIENT_DONE": 11, - "MONGOC_MD_FLAG_HAVE_WEAK_SYMBOLS": 12, - "MONGOC_MD_FLAG_NO_AUTOMATIC_GLOBALS": 13, # Removed in CDRIVER-1330. - "MONGOC_MD_FLAG_ENABLE_SSL_LIBRESSL": 14, # Removed in CDRIVER-5693. - "MONGOC_MD_FLAG_ENABLE_SASL_CYRUS": 15, - "MONGOC_MD_FLAG_ENABLE_SASL_SSPI": 16, - "MONGOC_MD_FLAG_HAVE_SOCKLEN": 17, - "MONGOC_MD_FLAG_ENABLE_COMPRESSION": 18, - "MONGOC_MD_FLAG_ENABLE_COMPRESSION_SNAPPY": 19, - "MONGOC_MD_FLAG_ENABLE_COMPRESSION_ZLIB": 20, - "MONGOC_MD_FLAG_ENABLE_SASL_GSSAPI": 21, - "MONGOC_MD_FLAG_ENABLE_RES_NSEARCH": 22, - "MONGOC_MD_FLAG_ENABLE_RES_NDESTROY": 23, - "MONGOC_MD_FLAG_ENABLE_RES_NCLOSE": 24, - "MONGOC_MD_FLAG_ENABLE_RES_SEARCH": 25, - "MONGOC_MD_FLAG_ENABLE_DNSAPI": 26, - "MONGOC_MD_FLAG_ENABLE_RDTSCP": 27, - "MONGOC_MD_FLAG_HAVE_SCHED_GETCPU": 28, - "MONGOC_MD_FLAG_ENABLE_SHM_COUNTERS": 29, - "MONGOC_MD_FLAG_TRACE": 30, + 'MONGOC_MD_FLAG_ENABLE_CRYPTO': 0, + 'MONGOC_MD_FLAG_ENABLE_CRYPTO_CNG': 1, + 'MONGOC_MD_FLAG_ENABLE_CRYPTO_COMMON_CRYPTO': 2, + 'MONGOC_MD_FLAG_ENABLE_CRYPTO_LIBCRYPTO': 3, + 'MONGOC_MD_FLAG_ENABLE_CRYPTO_SYSTEM_PROFILE': 4, + 'MONGOC_MD_FLAG_ENABLE_SASL': 5, + 'MONGOC_MD_FLAG_ENABLE_SSL': 6, + 'MONGOC_MD_FLAG_ENABLE_SSL_OPENSSL': 7, + 'MONGOC_MD_FLAG_ENABLE_SSL_SECURE_CHANNEL': 8, + 'MONGOC_MD_FLAG_ENABLE_SSL_SECURE_TRANSPORT': 9, + 'MONGOC_MD_FLAG_EXPERIMENTAL_FEATURES': 10, + 
'MONGOC_MD_FLAG_HAVE_SASL_CLIENT_DONE': 11, + 'MONGOC_MD_FLAG_HAVE_WEAK_SYMBOLS': 12, + 'MONGOC_MD_FLAG_NO_AUTOMATIC_GLOBALS': 13, # Removed in CDRIVER-1330. + 'MONGOC_MD_FLAG_ENABLE_SSL_LIBRESSL': 14, # Removed in CDRIVER-5693. + 'MONGOC_MD_FLAG_ENABLE_SASL_CYRUS': 15, + 'MONGOC_MD_FLAG_ENABLE_SASL_SSPI': 16, + 'MONGOC_MD_FLAG_HAVE_SOCKLEN': 17, + 'MONGOC_MD_FLAG_ENABLE_COMPRESSION': 18, + 'MONGOC_MD_FLAG_ENABLE_COMPRESSION_SNAPPY': 19, + 'MONGOC_MD_FLAG_ENABLE_COMPRESSION_ZLIB': 20, + 'MONGOC_MD_FLAG_ENABLE_SASL_GSSAPI': 21, + 'MONGOC_MD_FLAG_ENABLE_RES_NSEARCH': 22, + 'MONGOC_MD_FLAG_ENABLE_RES_NDESTROY': 23, + 'MONGOC_MD_FLAG_ENABLE_RES_NCLOSE': 24, + 'MONGOC_MD_FLAG_ENABLE_RES_SEARCH': 25, + 'MONGOC_MD_FLAG_ENABLE_DNSAPI': 26, + 'MONGOC_MD_FLAG_ENABLE_RDTSCP': 27, + 'MONGOC_MD_FLAG_HAVE_SCHED_GETCPU': 28, + 'MONGOC_MD_FLAG_ENABLE_SHM_COUNTERS': 29, + 'MONGOC_MD_FLAG_TRACE': 30, # `MONGOC_MD_FLAG_ENABLE_ICU` was accidentally removed in libmongoc 1.25.0-1.25.3. # If parsing a config-bitfield produced by libmongoc 1.25.0-1.25.3, use the version of `parse_handshake_cfg.py` from the git tag 1.25.0. 
- "MONGOC_MD_FLAG_ENABLE_ICU": 31, - "MONGOC_MD_FLAG_ENABLE_CLIENT_SIDE_ENCRYPTION": 32, - "MONGOC_MD_FLAG_ENABLE_MONGODB_AWS_AUTH": 33, - "MONGOC_MD_FLAG_ENABLE_SRV": 34, - "MONGOC_MD_FLAG_HAVE_BCRYPT_PBKDF2": 35, + 'MONGOC_MD_FLAG_ENABLE_ICU': 31, + 'MONGOC_MD_FLAG_ENABLE_CLIENT_SIDE_ENCRYPTION': 32, + 'MONGOC_MD_FLAG_ENABLE_MONGODB_AWS_AUTH': 33, + 'MONGOC_MD_FLAG_ENABLE_SRV': 34, + 'MONGOC_MD_FLAG_HAVE_BCRYPT_PBKDF2': 35, } + def main(): - flag_to_number = {s: 2 ** i for s,i in MD_FLAGS.items()} + flag_to_number = {s: 2**i for s, i in MD_FLAGS.items()} if len(sys.argv) < 2: - print ("Usage: python {0} config-bitfield".format(sys.argv[0])) - print ("Example: python parse_handshake_cfg.py 0x3e65") + print('Usage: python {0} config-bitfield'.format(sys.argv[0])) + print('Example: python parse_handshake_cfg.py 0x3e65') return config_bitfield_string = sys.argv[1] config_bitfield_num = int(config_bitfield_string, 0) - print ("Decimal value: {}".format(config_bitfield_num)) + print('Decimal value: {}'.format(config_bitfield_num)) for flag, num in flag_to_number.items(): - v = "true" if config_bitfield_num & num else "false" - print ("{:<50}: {}".format(flag, v)) + v = 'true' if config_bitfield_num & num else 'false' + print('{:<50}: {}'.format(flag, v)) + -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/base.sh b/tools/base.sh index d3fd8a18a44..5b0625b4d73 100755 --- a/tools/base.sh +++ b/tools/base.sh @@ -21,50 +21,49 @@ # * exists # • Return zero if names a file, directory, or either, respectively. 
- set -o errexit set -o pipefail set -o nounset is-set() { - [[ -n ${!1+x} ]] + [[ -n ${!1+x} ]] } log() { - echo "${@}" 1>&2 - return 0 + echo "${@}" 1>&2 + return 0 } debug() { - if [[ "${PRINT_DEBUG_LOGS:-0}" != "0" ]]; then - log "${@}" - fi + if [[ "${PRINT_DEBUG_LOGS:-0}" != "0" ]]; then + log "${@}" + fi } fail() { - log "${@}" - return 1 + log "${@}" + return 1 } run-chdir() { - [[ "$#" -gt 1 ]] || fail "run-chdir expects at least two arguments" - local _dir="$1" - shift - pushd "$_dir" > /dev/null - debug "Run in directory [$_dir]:" "$@" - "$@" - local _rc=$? - popd > /dev/null - return $_rc + [[ "$#" -gt 1 ]] || fail "run-chdir expects at least two arguments" + local _dir="$1" + shift + pushd "$_dir" >/dev/null + debug "Run in directory [$_dir]:" "$@" + "$@" + local _rc=$? + popd >/dev/null + return $_rc } -is-file() { [[ -f "$1" ]];} -is-dir() { [[ -d "$1" ]];} -exists() { [[ -e "$1" ]];} +is-file() { [[ -f "$1" ]]; } +is-dir() { [[ -d "$1" ]]; } +exists() { [[ -e "$1" ]]; } have-command() { - [[ "$#" -eq 1 ]] || fail "have-command expects a single argument" - type "$1" > /dev/null 2>&1 + [[ "$#" -eq 1 ]] || fail "have-command expects a single argument" + type "$1" >/dev/null 2>&1 } # Inhibit msys path conversion diff --git a/tools/download.sh b/tools/download.sh index 64d5e13cdd8..9467bf9789f 100755 --- a/tools/download.sh +++ b/tools/download.sh @@ -12,77 +12,77 @@ . "$(dirname "${BASH_SOURCE[0]}")/use.sh" base download-file() { - declare uri - declare out - unset no_tls_verify - while [[ "$#" != 0 ]]; do - case "$1" in - --uri) - shift - uri=$1 - ;; - --uri=*) - uri="${1#--uri=*}" - ;; - --out) - shift - out=$1 - ;; - --out=*) - out=${1#--out=*} - ;; - --no-tls-verify) - # shellcheck disable=2034 - no_tls_verify=1 - ;; - *) - fail "Unknown argument: $1" - esac - shift - done - if ! is-set uri || ! 
is-set out; then - fail "download-file requires --uri= and --out= arguments" - return - fi - debug "Download [$uri] to [$out]" + declare uri + declare out + unset no_tls_verify + while [[ "$#" != 0 ]]; do + case "$1" in + --uri) + shift + uri=$1 + ;; + --uri=*) + uri="${1#--uri=*}" + ;; + --out) + shift + out=$1 + ;; + --out=*) + out=${1#--out=*} + ;; + --no-tls-verify) + # shellcheck disable=2034 + no_tls_verify=1 + ;; + *) + fail "Unknown argument: $1" + ;; + esac + shift + done + if ! is-set uri || ! is-set out; then + fail "download-file requires --uri= and --out= arguments" + return + fi + debug "Download [$uri] to [$out]" - if have-command curl; then - curl_argv=( - --silent - --show-error - --fail - --retry 5 - --max-time 120 - --location # (Follow redirects) - --output "$out" - ) - if is-set no_tls_verify; then - curl_argv+=(--insecure) - fi - curl_argv+=(-- "$uri") - debug "Execute curl command: [curl ${curl_argv[*]}]" - output=$(curl "${curl_argv[@]}") || fail "$output" || return - debug "$output" - elif have-command wget; then - wget_argv=( - --output-document="$out" - --tries=5 - --timeout=120 - ) - if is-set no_tls_verify; then - wget_argv+=(--no-check-certificate) - fi - wget_argv+=(-- "$uri") - debug "Execute wget command: [wget ${wget_argv[*]}]" - output=$(wget "${wget_argv[@]}" 2>&1) || fail "wget failed: $output" || return - debug "$output" - else - fail "This script requires either curl or wget to be available" || return + if have-command curl; then + curl_argv=( + --silent + --show-error + --fail + --retry 5 + --max-time 120 + --location # (Follow redirects) + --output "$out" + ) + if is-set no_tls_verify; then + curl_argv+=(--insecure) + fi + curl_argv+=(-- "$uri") + debug "Execute curl command: [curl ${curl_argv[*]}]" + output=$(curl "${curl_argv[@]}") || fail "$output" || return + debug "$output" + elif have-command wget; then + wget_argv=( + --output-document="$out" + --tries=5 + --timeout=120 + ) + if is-set no_tls_verify; then + 
wget_argv+=(--no-check-certificate) fi - debug "Download [$uri] to [$out] - Done" + wget_argv+=(-- "$uri") + debug "Execute wget command: [wget ${wget_argv[*]}]" + output=$(wget "${wget_argv[@]}" 2>&1) || fail "wget failed: $output" || return + debug "$output" + else + fail "This script requires either curl or wget to be available" || return + fi + debug "Download [$uri] to [$out] - Done" } - if is-main; then - download-file "$@" + download-file "$@" fi diff --git a/tools/earthly.sh b/tools/earthly.sh index 14389c40e1b..34bf8eed373 100755 --- a/tools/earthly.sh +++ b/tools/earthly.sh @@ -8,16 +8,16 @@ set -euo pipefail # Calc the arch of the executable we want case "$ARCHNAME" in - x64) - arch=amd64 - ;; - arm64) - arch=arm64 - ;; - *) - echo "Unsupported architecture for automatic Earthly download: $HOSTTYPE" 1>&1 - exit 99 - ;; +x64) + arch=amd64 + ;; +arm64) + arch=arm64 + ;; +*) + echo "Unsupported architecture for automatic Earthly download: $HOSTTYPE" 1>&1 + exit 99 + ;; esac # The location where the Earthly executable will live @@ -29,16 +29,16 @@ EARTHLY_EXE="$cache_dir/$exe_filename" # Download if it isn't already present if ! 
is-file "$EARTHLY_EXE"; then - echo "Downloading $exe_filename $EARTHLY_VERSION" - url="https://github.com/earthly/earthly/releases/download/v$EARTHLY_VERSION/$exe_filename" - curl --retry 5 -LsS --max-time 120 --fail "$url" --output "$EARTHLY_EXE" - chmod a+x "$EARTHLY_EXE" + echo "Downloading $exe_filename $EARTHLY_VERSION" + url="https://github.com/earthly/earthly/releases/download/v$EARTHLY_VERSION/$exe_filename" + curl --retry 5 -LsS --max-time 120 --fail "$url" --output "$EARTHLY_EXE" + chmod a+x "$EARTHLY_EXE" fi run-earthly() { - "$EARTHLY_EXE" "$@" + "$EARTHLY_EXE" "$@" } if is-main; then - run-earthly "$@" + run-earthly "$@" fi diff --git a/tools/format.py b/tools/format.py index 767270f34ca..4ffdea9cb22 100644 --- a/tools/format.py +++ b/tools/format.py @@ -32,30 +32,30 @@ def main(argv: Sequence[str]) -> int: # By default, add two jobs to the CPU count since some work is waiting on disk dflt_jobs = multiprocessing.cpu_count() + 2 parser.add_argument( - "--jobs", - "-j", + '--jobs', + '-j', type=int, - help=f"Number of parallel jobs to run (default: {dflt_jobs})", - metavar="", + help=f'Number of parallel jobs to run (default: {dflt_jobs})', + metavar='', default=dflt_jobs, ) parser.add_argument( - "--mode", + '--mode', choices=RunMode.__args__, - help="Whether to apply changes, or simply check for formatting violations (default: apply)", - default="apply", + help='Whether to apply changes, or simply check for formatting violations (default: apply)', + default='apply', ) parser.add_argument( - "--clang-format-bin", - help="The clang-format executable to be used (default: “clang-format”)", - default="clang-format", - metavar="", + '--clang-format-bin', + help='The clang-format executable to be used (default: “clang-format”)', + default='clang-format', + metavar='', ) parser.add_argument( - "files", - metavar="", - nargs="*", - help="List of files to be selected for formatting. 
If omitted, the default set of files are selected", + 'files', + metavar='', + nargs='*', + help='List of files to be selected for formatting. If omitted, the default set of files are selected', ) args = parser.parse_args(argv) mode: RunMode = args.mode @@ -70,13 +70,13 @@ def main(argv: Sequence[str]) -> int: case patterns: files = [Path(p).resolve() for p in patterns] except Exception as e: - raise RuntimeError("Failed to collect files for formatting (See above)") from e + raise RuntimeError('Failed to collect files for formatting (See above)') from e # Fail if no files matched assert files # Split the file list into groups to be dispatched num_jobs: int = min(args.jobs, len(files)) groups = [files[n::num_jobs] for n in range(num_jobs)] - print(f"Formatting {len(files)} files with {num_jobs} workers...", file=sys.stderr) + print(f'Formatting {len(files)} files with {num_jobs} workers...', file=sys.stderr) # Bind the formatting arguments to the formatter function format_group = functools.partial(_format_files, mode=mode, clang_format=cf) @@ -86,14 +86,14 @@ def main(argv: Sequence[str]) -> int: try: okay = all(pool.map(format_group, groups)) except Exception as e: - raise RuntimeError("Unexpected error while formatting files (See above)") from e + raise RuntimeError('Unexpected error while formatting files (See above)') from e if not okay: return 1 return 0 -RunMode = Literal["apply", "check"] -"Whether we should apply changes, or just check for violations" +RunMode = Literal['apply', 'check'] +'Whether we should apply changes, or just check for violations' #: This regex tells us which #include directives should be modified to use angle brackets #: The regex is written to preserve whitespace and surrounding context. 
re.VERBOSE @@ -130,27 +130,27 @@ def main(argv: Sequence[str]) -> int: """ SOURCE_PATTERNS = [ - "**/*.h", - "**/*.hpp", - "**/*.c", - "**/*.cpp", + '**/*.h', + '**/*.hpp', + '**/*.c', + '**/*.cpp', ] """ Recursive source file patterns, based on file extensions. """ SOURCE_DIRS = [ - "src/common", - "src/libbson", - "src/libmongoc", - "tests", + 'src/common', + 'src/libbson', + 'src/libmongoc', + 'tests', ] """ Directories that contain our own source files (not vendored code) """ EXCLUDE_SOURCES = [ - "src/libbson/src/jsonsl/**/*", + 'src/libbson/src/jsonsl/**/*', ] """ Globbing patterns that select files that are contained in our source directories, @@ -181,14 +181,14 @@ def _include_subst_fn(fpath: Path): def f(mat: re.Match[str]) -> str: # See groups in INCLUDE_RE - target = mat["path"] + target = mat['path'] abs_target = parent_dir / target if abs_target.is_file(): # This should be a relative include: newl = f'{mat["directive"]}"./{target}"{mat["tail"]}' else: - newl = f"{mat['directive']}<{target}>{mat['tail']}" - print(f" - {fpath}: update #include directive: {mat[0]!r} → {newl!r}", file=sys.stderr) + newl = f'{mat["directive"]}<{target}>{mat["tail"]}' + print(f' - {fpath}: update #include directive: {mat[0]!r} → {newl!r}', file=sys.stderr) return newl return f @@ -199,7 +199,7 @@ def _fixup_includes(fpath: Path, *, mode: RunMode) -> bool: Apply #include-fixup to the content of the given source file. """ # Split into lines - old_lines = fpath.read_text().split("\n") + old_lines = fpath.read_text().split('\n') # Do a regex substitution on ever line: rx = re.compile(INCLUDE_RE, re.VERBOSE) new_lines = [rx.sub(_include_subst_fn(fpath), ln) for ln in old_lines] @@ -210,14 +210,14 @@ def _fixup_includes(fpath: Path, *, mode: RunMode) -> bool: case False, _: # No file changes. Nothing to do return True - case _, "apply": + case _, 'apply': # We are applying changes. 
Write the lines back into the file and tell # the caller that we succeeded - fpath.write_text("\n".join(new_lines), newline="\n") + fpath.write_text('\n'.join(new_lines), newline='\n') return True - case _, "check": + case _, 'check': # File changes, and we are only checking. Print an error message and indicate failure to the caller - print(f"File [{fpath}] contains improper #include directives", file=sys.stderr) + print(f'File [{fpath}] contains improper #include directives', file=sys.stderr) return False @@ -230,7 +230,7 @@ def fixup_one(p: Path) -> bool: try: return _fixup_includes(p, mode=mode) except Exception as e: - raise RuntimeError(f"Unexpected error while fixing-up the #includes on file [{p}] (See above)") from e + raise RuntimeError(f'Unexpected error while fixing-up the #includes on file [{p}] (See above)') from e # First update the `#include` directives, since that can change the sort order # that clang-format might want to apply @@ -239,18 +239,18 @@ def fixup_one(p: Path) -> bool: # Whether we check for format violations or modify the files in-place match mode: - case "apply": - mode_args = ["-i"] - case "check": - mode_args = ["--dry-run", "-Werror"] + case 'apply': + mode_args = ['-i'] + case 'check': + mode_args = ['--dry-run', '-Werror'] cmd = [clang_format, *mode_args, *map(str, files)] try: res = subprocess.run(cmd, check=False, stderr=subprocess.STDOUT, stdout=subprocess.PIPE) except Exception as e: - raise RuntimeError(f"Failed to spawn [{clang_format}] process for formatting files (See above)") from e + raise RuntimeError(f'Failed to spawn [{clang_format}] process for formatting files (See above)') from e sys.stderr.buffer.write(res.stdout) return res.returncode == 0 -if __name__ == "__main__": +if __name__ == '__main__': sys.exit(main(sys.argv[1:])) diff --git a/tools/paths.sh b/tools/paths.sh index 5e55532671e..a9a22f8cd2a 100755 --- a/tools/paths.sh +++ b/tools/paths.sh @@ -28,89 +28,93 @@ # Check for Cygpath, used by various commands. 
Better to check once than check every time. _HAVE_CYGPATH=false if have-command cygpath; then - _HAVE_CYGPATH=true + _HAVE_CYGPATH=true fi # Usage: native-path native-path() { - [[ "$#" -eq 1 ]] || fail "native_path expects exactly one argument" - local arg=$1 - if $IS_WINDOWS; then - $_HAVE_CYGPATH || fail "No 'cygpath' command is available, but we require it to normalize file paths on Windows." - local ret - ret="$(cygpath -w "$arg")" - debug "Convert path [$arg] → [$ret]" - printf %s "$ret" - else - printf %s "$arg" - fi + [[ "$#" -eq 1 ]] || fail "native_path expects exactly one argument" + local arg=$1 + if $IS_WINDOWS; then + $_HAVE_CYGPATH || fail "No 'cygpath' command is available, but we require it to normalize file paths on Windows." + local ret + ret="$(cygpath -w "$arg")" + debug "Convert path [$arg] → [$ret]" + printf %s "$ret" + else + printf %s "$arg" + fi } # Usage: to_absolute to_absolute() { - [[ "$#" -eq 1 ]] || fail "to_absolute expects a single argument" - local ret - local arg="$1" - debug "Resolve path [$arg]" - - # Cygpath can resolve the path in a single subprocess: - if $_HAVE_CYGPATH; then - # Ask Cygpath to resolve the path. It knows how to do it reliably and quickly: - ret=$(cygpath --absolute --mixed --long-name -- "$arg") - debug "Cygpath resolved: [$arg]" - printf %s "$ret" - return 0 - fi - - # If the given directory exists, we can ask the shell to resolve the path - # by going there and asking the PWD: - if is-dir "$arg"; then - ret=$(run-chdir "$arg" pwd) - debug "Resolved: [$arg]" - printf %s "$ret" - return 0 - fi - - # Do it the "slow" way: - - # The parent path: - local _parent - _parent="$(dirname "$arg")" - # The filename part: - local _fname - _fname="$(basename "$arg")" - # There are four cases to consider from dirname: - if [[ $_parent = "." ]]; then # The parent is '.' as in './foo' - # Replace the leading '.' with the working directory - _parent="$(pwd)" - elif [[ $_parent = ".." ]]; then # The parent is '..' 
as in '../foo' - # Replace a leading '..' with the parent of the working directory - _parent="$(dirname "$(pwd)")" - elif [[ $arg == "$_parent" ]]; then # The parent is itself, as in '/' - # A root directory is its own parent according to 'dirname' - printf %s "$arg" - return 0 - else # The parent is some other path, like 'foo' in 'foo/bar' - # Resolve the parent path - _parent="$(set +x; DEBUG=0 to_absolute "$_parent")" - fi - # At this point $_parent is an absolute path - if [[ $_fname = ".." ]]; then - # Strip one component - ret="$(dirname "$_parent")" - elif [[ $_fname = "." ]]; then - # Drop a '.' at the end of a path - ret="$_parent" - else - # Join the result - ret="$_parent/$_fname" - fi - # Remove duplicate dir separators - while [[ $ret =~ "//" ]]; do - ret="${ret//\/\///}" - done - debug "Resolved path: [$arg] → [$ret]" + [[ "$#" -eq 1 ]] || fail "to_absolute expects a single argument" + local ret + local arg="$1" + debug "Resolve path [$arg]" + + # Cygpath can resolve the path in a single subprocess: + if $_HAVE_CYGPATH; then + # Ask Cygpath to resolve the path. It knows how to do it reliably and quickly: + ret=$(cygpath --absolute --mixed --long-name -- "$arg") + debug "Cygpath resolved: [$arg]" + printf %s "$ret" + return 0 + fi + + # If the given directory exists, we can ask the shell to resolve the path + # by going there and asking the PWD: + if is-dir "$arg"; then + ret=$(run-chdir "$arg" pwd) + debug "Resolved: [$arg]" printf %s "$ret" + return 0 + fi + + # Do it the "slow" way: + + # The parent path: + local _parent + _parent="$(dirname "$arg")" + # The filename part: + local _fname + _fname="$(basename "$arg")" + # There are four cases to consider from dirname: + if [[ $_parent = "." ]]; then # The parent is '.' as in './foo' + # Replace the leading '.' with the working directory + _parent="$(pwd)" + elif [[ $_parent = ".." ]]; then # The parent is '..' as in '../foo' + # Replace a leading '..' 
with the parent of the working directory + _parent="$(dirname "$(pwd)")" + elif [[ $arg == "$_parent" ]]; then # The parent is itself, as in '/' + # A root directory is its own parent according to 'dirname' + printf %s "$arg" + return 0 + else + # The parent is some other path, like 'foo' in 'foo/bar' + # Resolve the parent path + _parent="$( + set +x + DEBUG=0 to_absolute "$_parent" + )" + fi + # At this point $_parent is an absolute path + if [[ $_fname = ".." ]]; then + # Strip one component + ret="$(dirname "$_parent")" + elif [[ $_fname = "." ]]; then + # Drop a '.' at the end of a path + ret="$_parent" + else + # Join the result + ret="$_parent/$_fname" + fi + # Remove duplicate dir separators + while [[ $ret =~ "//" ]]; do + ret="${ret//\/\///}" + done + debug "Resolved path: [$arg] → [$ret]" + printf %s "$ret" } # Get the TOOLS_DIR as a native absolute path. All other path vars are derived @@ -125,21 +129,21 @@ declare -r MONGOC_DIR=$MONGOC_DIR EXE_SUFFIX="" if $IS_WINDOWS; then - EXE_SUFFIX=".exe" + EXE_SUFFIX=".exe" fi declare -r EXE_SUFFIX=$EXE_SUFFIX if [[ "${USER_CACHES_DIR:=${XDG_CACHE_HOME:-}}" = "" ]]; then - if $IS_DARWIN; then - USER_CACHES_DIR=$HOME/Library/Caches - elif $IS_UNIX_LIKE; then - USER_CACHES_DIR=$HOME/.cache - elif $IS_WINDOWS; then - USER_CACHES_DIR=${LOCALAPPDATA:-$USERPROFILE/.cache} - else - log "Using ~/.cache as fallback user caching directory" - USER_CACHES_DIR="$(to_absolute ~/.cache)" - fi + if $IS_DARWIN; then + USER_CACHES_DIR=$HOME/Library/Caches + elif $IS_UNIX_LIKE; then + USER_CACHES_DIR=$HOME/.cache + elif $IS_WINDOWS; then + USER_CACHES_DIR=${LOCALAPPDATA:-$USERPROFILE/.cache} + else + log "Using ~/.cache as fallback user caching directory" + USER_CACHES_DIR="$(to_absolute ~/.cache)" + fi fi # Ensure we are dealing with a complete path @@ -150,12 +154,12 @@ declare -r USER_CACHES_DIR=$USER_CACHES_DIR : "${BUILD_CACHE_DIR:="$USER_CACHES_DIR/mongoc/build.$BUILD_CACHE_BUST"}" if is-main; then - # Just print the paths 
that we detected - log "Paths:" - log " • USER_CACHES_DIR=[$USER_CACHES_DIR]" - log " • BUILD_CACHE_DIR=[$BUILD_CACHE_DIR]" - log " • BUILD_CACHE_BUST=[$BUILD_CACHE_BUST]" - log " • EXE_SUFFIX=[$EXE_SUFFIX]" - log " • TOOLS_DIR=[$TOOLS_DIR]" - log " • MONGOC_DIR=[$MONGOC_DIR]" + # Just print the paths that we detected + log "Paths:" + log " • USER_CACHES_DIR=[$USER_CACHES_DIR]" + log " • BUILD_CACHE_DIR=[$BUILD_CACHE_DIR]" + log " • BUILD_CACHE_BUST=[$BUILD_CACHE_BUST]" + log " • EXE_SUFFIX=[$EXE_SUFFIX]" + log " • TOOLS_DIR=[$TOOLS_DIR]" + log " • MONGOC_DIR=[$MONGOC_DIR]" fi diff --git a/tools/platform.sh b/tools/platform.sh index e25a779646c..566d856def9 100755 --- a/tools/platform.sh +++ b/tools/platform.sh @@ -21,31 +21,31 @@ _is_wsl=false _is_bsd=false _os_family=unknown case "$OSTYPE" in - linux-*) - if have-command cmd.exe; then - _is_wsl=true - _is_unix_like=true - _os_family=windows - else - _is_linux=true - _is_unix_like=true - _os_family=linux - fi - ;; - darwin*) - _is_darwin=true - _is_unix_like=true - _os_family=darwin - ;; - FreeBSD|openbsd*|netbsd) - _is_bsd=true - _is_unix_like=true - _os_family=bsd - ;; - msys*|cygwin*) - _is_windows=true - _os_family=windows - ;; +linux-*) + if have-command cmd.exe; then + _is_wsl=true + _is_unix_like=true + _os_family=windows + else + _is_linux=true + _is_unix_like=true + _os_family=linux + fi + ;; +darwin*) + _is_darwin=true + _is_unix_like=true + _os_family=darwin + ;; +FreeBSD | openbsd* | netbsd) + _is_bsd=true + _is_unix_like=true + _os_family=bsd + ;; +msys* | cygwin*) + _is_windows=true + _os_family=windows + ;; esac declare -r IS_DARWIN=$_is_darwin @@ -59,33 +59,33 @@ declare -r OS_FAMILY=$_os_family _is_redhat_based=false _is_debian_based=false if $IS_LINUX; then - if is-file /etc/redhat-release; then - _is_redhat_based=true - _dist_version=$(sed 's|.*release \([^ ]\+\).*|\1|' < /etc/redhat-release) - elif is-file /etc/debian_version; then - _is_debian_based=true - _dist_version=$(grep VERSION_ID 
/etc/os-release | sed 's|VERSION_ID="\(.*\)"|\1|') - elif is-file /etc/alpine-release; then - _is_alpine=true - _dist_version=$(cat /etc/alpine-release) - fi - _dist_version=${_dist_version:-0} - _major_version=${_dist_version/.*/} - declare -r DIST_VERSION=$_dist_version - declare -r DIST_MAJOR_VERSION=$_major_version + if is-file /etc/redhat-release; then + _is_redhat_based=true + _dist_version=$(sed 's|.*release \([^ ]\+\).*|\1|' &2 - _err=0 - # Detect self-import: - if printf '%s\0' "${BASH_SOURCE[@]}" | grep -qFxZ -- "$file"; then - echo "File '$file' imports itself transitively" 1>&2 - _err=1 - fi - # Detect non-existing imports: - if ! [[ -f $file ]]; then - echo "No script '$file' exists to import." 1>&2 - _err=1 - fi - # Print the stacktrace of imports upon error: - if [[ $_err -eq 1 ]]; then - printf " • [%s] loaded by:\n" "${BASH_SOURCE[@]}" 1>&2 - echo " • (user)" 1>&2 - echo "Bailing out" 1>&2 - return 1 - fi - # shellcheck disable=1090 - . "$file" - # Recover item from the stack, since we may have recursed: - item="${_USE_IMPORTING[${#_USE_IMPORTING[@]}-1]}" - # Pop the top stack item: - unset "_USE_IMPORTING[${#_USE_IMPORTING[@]}-1]" - # Declare that the item has been imported, for future reference: - declare "_IMPORTED_$item=1" - ! [[ ${PRINT_DEBUG_LOGS:-} = 1 ]] || echo "Import: [$item] - done" 1>&2 + # Don't double-import items: + _varname="_IMPORTED_$item" + if [[ -n "${!_varname+n}" ]]; then + continue + fi + # Push this item: + _USE_IMPORTING+=("$item") + # The file to be imported: + file=$_this_dir/$item.sh + ! [[ ${PRINT_DEBUG_LOGS:-} = 1 ]] || echo "Import: [$item]" 1>&2 + _err=0 + # Detect self-import: + if printf '%s\0' "${BASH_SOURCE[@]}" | grep -qFxZ -- "$file"; then + echo "File '$file' imports itself transitively" 1>&2 + _err=1 + fi + # Detect non-existing imports: + if ! [[ -f $file ]]; then + echo "No script '$file' exists to import." 
1>&2 + _err=1 + fi + # Print the stacktrace of imports upon error: + if [[ $_err -eq 1 ]]; then + printf " • [%s] loaded by:\n" "${BASH_SOURCE[@]}" 1>&2 + echo " • (user)" 1>&2 + echo "Bailing out" 1>&2 + return 1 + fi + # shellcheck disable=1090 + . "$file" + # Recover item from the stack, since we may have recursed: + item="${_USE_IMPORTING[${#_USE_IMPORTING[@]} - 1]}" + # Pop the top stack item: + unset "_USE_IMPORTING[${#_USE_IMPORTING[@]}-1]" + # Declare that the item has been imported, for future reference: + declare "_IMPORTED_$item=1" + ! [[ ${PRINT_DEBUG_LOGS:-} = 1 ]] || echo "Import: [$item] - done" 1>&2 done # Set _IS_MAIN to zero if the import stack is empty if [[ "${_USE_IMPORTING+${_USE_IMPORTING[*]}}" = "" ]]; then - _IS_MAIN=true + _IS_MAIN=true fi From 27ef84c1ee0784c78d070a5b7722e5b4b2d75311 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:42 -0500 Subject: [PATCH 07/20] Apply `ruff check --fix` --- .../config_generator/components/kms_divergence_check.py | 2 +- .evergreen/config_generator/components/loadbalanced.py | 2 +- .../legacy_config_generator/evergreen_config_lib/tasks.py | 7 +++---- .../evergreen_config_lib/testgcpkms.py | 2 -- .evergreen/ocsp/mock_ocsp_responder.py | 5 ++--- build/sphinx/homepage-config/conf.py | 2 +- build/sphinx/mongoc_common.py | 1 - docs/dev/conf.py | 2 +- lldb_bson.py | 4 ++-- src/libbson/doc/conf.py | 2 +- src/libmongoc/doc/conf.py | 3 +-- tools/format.py | 1 - 12 files changed, 13 insertions(+), 20 deletions(-) diff --git a/.evergreen/config_generator/components/kms_divergence_check.py b/.evergreen/config_generator/components/kms_divergence_check.py index 224d5406150..c927bf80c24 100644 --- a/.evergreen/config_generator/components/kms_divergence_check.py +++ b/.evergreen/config_generator/components/kms_divergence_check.py @@ -1,4 +1,4 @@ -from shrub.v3.evg_command import EvgCommandType, s3_put +from shrub.v3.evg_command import EvgCommandType from shrub.v3.evg_task import EvgTask from 
config_generator.etc.function import Function diff --git a/.evergreen/config_generator/components/loadbalanced.py b/.evergreen/config_generator/components/loadbalanced.py index c26afd5d230..b86bca3c949 100644 --- a/.evergreen/config_generator/components/loadbalanced.py +++ b/.evergreen/config_generator/components/loadbalanced.py @@ -9,7 +9,7 @@ from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_tests import RunTests from config_generator.components.funcs.upload_build import UploadBuild -from config_generator.etc.distros import find_large_distro, find_small_distro, make_distro_str +from config_generator.etc.distros import find_large_distro, make_distro_str from config_generator.etc.utils import bash_exec # Use `rhel8-latest` distro. `rhel8-latest` distro includes necessary dependency: `haproxy`. diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py index cb9ad5bfc0b..9ebdc675629 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py @@ -14,15 +14,14 @@ from collections import OrderedDict as OD from itertools import chain -from typing import ClassVar, Iterable, Literal, Mapping, MutableMapping, MutableSequence, Optional, Sequence +from typing import ClassVar, Iterable, Literal, Mapping, MutableSequence, Optional, Sequence -from evergreen_config_generator import Scalar, Value +from evergreen_config_generator import Value from evergreen_config_generator.functions import func, s3_put from evergreen_config_generator.tasks import ( DependencySpec, MatrixTask, NamedTask, - Task, both_or_neither, prohibit, require, @@ -742,7 +741,7 @@ def additional_dependencies(self) -> Iterable[DependencySpec]: def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() yield f'{self.settings.version}' - 
yield f'test-aws' + yield 'test-aws' def post_commands(self) -> Iterable[Value]: return [ diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py index 99de9d4519d..62b036ed48f 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py @@ -22,8 +22,6 @@ from evergreen_config_generator.tasks import NamedTask from evergreen_config_generator.variants import Variant -from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest - def _create_tasks(): passtask = NamedTask( diff --git a/.evergreen/ocsp/mock_ocsp_responder.py b/.evergreen/ocsp/mock_ocsp_responder.py index 9de06cf78c7..23281b97380 100644 --- a/.evergreen/ocsp/mock_ocsp_responder.py +++ b/.evergreen/ocsp/mock_ocsp_responder.py @@ -49,7 +49,6 @@ import sys import textwrap from datetime import datetime, timedelta, timezone -from typing import Callable, Optional, Tuple from asn1crypto import core, keys, ocsp, x509 from asn1crypto.ocsp import OCSPRequest, OCSPResponse @@ -498,7 +497,7 @@ def validate(self): elif self._fault == FAULT_UNKNOWN: return (CertificateStatus.unknown, None) elif self._fault != None: - raise NotImplemented('Fault type could not be found') + raise NotImplementedError('Fault type could not be found') return (CertificateStatus.good, time) def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: @@ -510,7 +509,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: request_list = tbs_request['request_list'] if len(request_list) < 1: logger.warning('Received OCSP request with no requests') - raise NotImplemented('Empty requests not supported') + raise NotImplementedError('Empty requests not supported') single_request = request_list[0] # TODO: Support more than one request req_cert = single_request['req_cert'] diff --git 
a/build/sphinx/homepage-config/conf.py b/build/sphinx/homepage-config/conf.py index 9e4fe048b8d..e7de65f6593 100644 --- a/build/sphinx/homepage-config/conf.py +++ b/build/sphinx/homepage-config/conf.py @@ -59,7 +59,7 @@ def setup(app): html_use_smartypants = False html_show_sourcelink = False html_use_index = False -rst_prolog = rf""" +rst_prolog = r""" .. _mongodb_docs_cdriver: https://www.mongodb.com/docs/languages/c/c-driver/current/ diff --git a/build/sphinx/mongoc_common.py b/build/sphinx/mongoc_common.py index 96d50f3fb25..ae20576b53a 100644 --- a/build/sphinx/mongoc_common.py +++ b/build/sphinx/mongoc_common.py @@ -14,7 +14,6 @@ # Try importing from older Sphinx version path. from sphinx.builders.html import DirectoryHTMLBuilder from docutils.parsers.rst import Directive -from sphinx.config import Config # Do not require newer sphinx. EPEL packages build man pages with Sphinx 1.7.6. Refer: CDRIVER-4767 needs_sphinx = '1.7' diff --git a/docs/dev/conf.py b/docs/dev/conf.py index e09fd16fa8b..fd520f89fd8 100644 --- a/docs/dev/conf.py +++ b/docs/dev/conf.py @@ -38,7 +38,7 @@ pygments_style = 'sphinx' html_static_path = [] -rst_prolog = rf""" +rst_prolog = r""" .. 
role:: bash(code) :language: bash """ diff --git a/lldb_bson.py b/lldb_bson.py index 18fa86ff32a..929b4263cb6 100644 --- a/lldb_bson.py +++ b/lldb_bson.py @@ -525,7 +525,7 @@ def _parse_one( return DocumentError(f'Invalid element type tag 0x{buf[0]:x}', elem_offset) except IndexError: # 'buf' was empty - return DocumentError(f'Unexpected end-of-data', elem_offset) + return DocumentError('Unexpected end-of-data', elem_offset) # Stop if this is the end: if type_tag == BSONType.EOD: return DocumentElement(type_tag, '', 0, 0) @@ -533,7 +533,7 @@ def _parse_one( try: key_nulpos = buf.index(0, 1) except ValueError: - return DocumentError(f'Unexpected end-of-data while parsing the element key', elem_offset) + return DocumentError('Unexpected end-of-data while parsing the element key', elem_offset) key_bytes = buf[1:key_nulpos] try: key = key_bytes.decode('utf-8') diff --git a/src/libbson/doc/conf.py b/src/libbson/doc/conf.py index bf8187fac2f..3507c293433 100644 --- a/src/libbson/doc/conf.py +++ b/src/libbson/doc/conf.py @@ -35,7 +35,7 @@ html_use_index = False -rst_prolog = rf""" +rst_prolog = r""" .. 
_mongodb_docs_cdriver: https://www.mongodb.com/docs/languages/c/c-driver/current/ diff --git a/src/libmongoc/doc/conf.py b/src/libmongoc/doc/conf.py index 7ac74de70d0..b27eea20e04 100644 --- a/src/libmongoc/doc/conf.py +++ b/src/libmongoc/doc/conf.py @@ -15,7 +15,6 @@ from docutils.parsers.rst import Directive, directives from sphinx.application import Sphinx from sphinx.application import logger as sphinx_log -from sphinx.config import Config has_sphinx_design = False try: @@ -247,7 +246,7 @@ def run(self): adm = self.options.get('admonition') if adm is not None: self.options.setdefault('class-container', []).extend(('admonition', adm)) - self.options.setdefault('class-title', []).append(f'admonition-title') + self.options.setdefault('class-title', []).append('admonition-title') return super().run() else: diff --git a/tools/format.py b/tools/format.py index 4ffdea9cb22..4b2f75fff29 100644 --- a/tools/format.py +++ b/tools/format.py @@ -14,7 +14,6 @@ import functools import itertools import multiprocessing -import os import re import subprocess import sys From aa301c1d2ea42bc82a8dc30d55234e5c03674665 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:42 -0500 Subject: [PATCH 08/20] Apply `ruff check --unsafe-fixes` --- .evergreen/ocsp/mock_ocsp_responder.py | 2 +- build/bottle.py | 19 +++++++++++++++---- lldb_bson.py | 5 ++++- 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/.evergreen/ocsp/mock_ocsp_responder.py b/.evergreen/ocsp/mock_ocsp_responder.py index 23281b97380..a53d5396230 100644 --- a/.evergreen/ocsp/mock_ocsp_responder.py +++ b/.evergreen/ocsp/mock_ocsp_responder.py @@ -496,7 +496,7 @@ def validate(self): return (CertificateStatus.revoked, time) elif self._fault == FAULT_UNKNOWN: return (CertificateStatus.unknown, None) - elif self._fault != None: + elif self._fault is not None: raise NotImplementedError('Fault type could not be found') return (CertificateStatus.good, time) diff --git a/build/bottle.py 
b/build/bottle.py index 84e5729ffca..27822f53763 100644 --- a/build/bottle.py +++ b/build/bottle.py @@ -132,8 +132,13 @@ def getargspec(func): basestring = str unicode = str - json_loads = lambda s: json_lds(touni(s)) - callable = lambda x: hasattr(x, '__call__') + + def json_loads(s): + return json_lds(touni(s)) + + def callable(x): + return hasattr(x, '__call__') + imap = map def _raise(*a): @@ -1155,7 +1160,10 @@ def _cast(self, out, peek=None): elif isinstance(first, bytes): new_iter = itertools.chain([first], iout) elif isinstance(first, unicode): - encoder = lambda x: x.encode(response.charset) + + def encoder(x): + return x.encode(response.charset) + new_iter = imap(encoder, itertools.chain([first], iout)) else: msg = 'Unsupported response type: %s' % type(first) @@ -4158,7 +4166,10 @@ def __init__(self, lockfile, interval): def run(self): exists = os.path.exists - mtime = lambda p: os.stat(p).st_mtime + + def mtime(p): + return os.stat(p).st_mtime + files = dict() for module in list(sys.modules.values()): diff --git a/lldb_bson.py b/lldb_bson.py index 929b4263cb6..9fbc0273e67 100644 --- a/lldb_bson.py +++ b/lldb_bson.py @@ -1002,8 +1002,11 @@ def __parse__(cls, value: SBValue) -> Decimal128Value: d128_tetra = (hi_word << 64) | low_word # Create an array of individual bits (high bits first): bits = tuple(((d128_tetra >> n) & 1) for n in range(127, -1, -1)) + # Recombine a sequence of bits into an int (high bits first) - mergebits: Callable[[tuple[int, ...]], int] = lambda bs: functools.reduce(lambda a, b: (a << 1) | b, bs, 0) + def mergebits(bs: tuple[int, ...]) -> int: + return functools.reduce(lambda a, b: (a << 1) | b, bs, 0) + # Sign bit: sign = bits[0] # BID uses the first two combo bits to indicate that the exponent is shifted From 375cfbd4e3bc813cd4c347e430121568cabc2c29 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:42 -0500 Subject: [PATCH 09/20] Address ruff check warning F401 --- .../evergreen_config_generator/__init__.py | 
1 - 1 file changed, 1 deletion(-) diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py index c5f877586d1..17021df59ff 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py @@ -13,7 +13,6 @@ # limitations under the License. -import sys from collections import OrderedDict as OD from typing import Any, Iterable, Mapping, MutableMapping, MutableSequence, Sequence, Union From a328e3ca1b7a9ee2e346ac9c59b3d3ab8358b7ec Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:42 -0500 Subject: [PATCH 10/20] Address ruff check warning E402 --- .../evergreen_config_generator/__init__.py | 7 ++-- build/bottle.py | 42 +++++++++---------- build/fake_kms_provider_server.py | 3 +- build/sphinx/homepage-config/conf.py | 2 +- src/libbson/doc/conf.py | 2 +- src/libmongoc/doc/conf.py | 2 +- 6 files changed, 28 insertions(+), 30 deletions(-) diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py index 17021df59ff..f6394b85b8a 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py @@ -16,6 +16,9 @@ from collections import OrderedDict as OD from typing import Any, Iterable, Mapping, MutableMapping, MutableSequence, Sequence, Union +import yaml +import yamlloader + Scalar = Union[str, bool, int, None, float] 'YAML simple schema scalar types' ValueSequence = Sequence['Value'] @@ -35,10 +38,6 @@ 'An OrderedDict of YAML values' -import yaml -import yamlloader - - class ConfigObject(object): @property def name(self) -> str: diff --git a/build/bottle.py b/build/bottle.py index 27822f53763..718e7da0137 100644 --- a/build/bottle.py +++ 
b/build/bottle.py @@ -70,27 +70,27 @@ def _cli_patch(cli_args): # pragma: no coverage # Imports and Python 2/3 unification ########################################## ############################################################################### -import base64 -import calendar -import email.utils -import functools -import hashlib -import hmac -import itertools -import mimetypes -import os -import re -import tempfile -import threading -import time -import warnings -import weakref -from datetime import date as datedate -from datetime import datetime, timedelta -from tempfile import NamedTemporaryFile -from traceback import format_exc, print_exc -from types import FunctionType -from unicodedata import normalize +import base64 # noqa: E402 +import calendar # noqa: E402 +import email.utils # noqa: E402 +import functools # noqa: E402 +import hashlib # noqa: E402 +import hmac # noqa: E402 +import itertools # noqa: E402 +import mimetypes # noqa: E402 +import os # noqa: E402 +import re # noqa: E402 +import tempfile # noqa: E402 +import threading # noqa: E402 +import time # noqa: E402 +import warnings # noqa: E402 +import weakref # noqa: E402 +from datetime import date as datedate # noqa: E402 +from datetime import datetime, timedelta # noqa: E402 +from tempfile import NamedTemporaryFile # noqa: E402 +from traceback import format_exc, print_exc # noqa: E402 +from types import FunctionType # noqa: E402 +from unicodedata import normalize # noqa: E402 try: from ujson import dumps as json_dumps diff --git a/build/fake_kms_provider_server.py b/build/fake_kms_provider_server.py index c934cd00cf9..8b6448a3bbd 100644 --- a/build/fake_kms_provider_server.py +++ b/build/fake_kms_provider_server.py @@ -4,6 +4,7 @@ import time import traceback from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Iterable, cast, overload import bottle from bottle import Bottle, HTTPResponse @@ -11,8 +12,6 @@ kms_provider = Bottle(autojson=True) """A mock server for Azure IMDS and 
GCP metadata""" -from typing import TYPE_CHECKING, Any, Callable, Iterable, cast, overload - if not TYPE_CHECKING: from bottle import request else: diff --git a/build/sphinx/homepage-config/conf.py b/build/sphinx/homepage-config/conf.py index e7de65f6593..18ab43b5f6b 100644 --- a/build/sphinx/homepage-config/conf.py +++ b/build/sphinx/homepage-config/conf.py @@ -8,7 +8,7 @@ this_path = os.path.dirname(__file__) sys.path.append(os.path.normpath(os.path.join(this_path, '../'))) -from mongoc_common import * +from mongoc_common import * # noqa: E402 with open(this_path + '/../../../VERSION_CURRENT') as vc: release = version = vc.read() diff --git a/src/libbson/doc/conf.py b/src/libbson/doc/conf.py index 3507c293433..29f963c4a40 100644 --- a/src/libbson/doc/conf.py +++ b/src/libbson/doc/conf.py @@ -7,7 +7,7 @@ this_path = os.path.dirname(__file__) sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) -from mongoc_common import * +from mongoc_common import * # noqa: E402 extensions = [ 'mongoc', diff --git a/src/libmongoc/doc/conf.py b/src/libmongoc/doc/conf.py index b27eea20e04..d24193ce31c 100644 --- a/src/libmongoc/doc/conf.py +++ b/src/libmongoc/doc/conf.py @@ -32,7 +32,7 @@ sys.path.append(this_path) sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) -from mongoc_common import * +from mongoc_common import * # noqa: E402 extensions = [ 'mongoc', From f6b63b1fce2f22a03eabd516af6f3dc275eca2fb Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:42 -0500 Subject: [PATCH 11/20] Address ruff check warning E722 --- build/bottle.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build/bottle.py b/build/bottle.py index 718e7da0137..4819c3aef63 100644 --- a/build/bottle.py +++ b/build/bottle.py @@ -4144,7 +4144,7 @@ def run( pass except (SystemExit, MemoryError): raise - except: + except Exception: if not reloader: raise if not getattr(server, 'quiet', quiet): From 
6ec4052b9dc7e83e4289719bffe8668101686869 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 10 Sep 2025 15:29:42 -0500 Subject: [PATCH 12/20] Address ruff check warning F403 --- build/sphinx/homepage-config/conf.py | 2 +- src/libbson/doc/conf.py | 2 +- src/libmongoc/doc/conf.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/build/sphinx/homepage-config/conf.py b/build/sphinx/homepage-config/conf.py index 18ab43b5f6b..806a29f33bc 100644 --- a/build/sphinx/homepage-config/conf.py +++ b/build/sphinx/homepage-config/conf.py @@ -8,7 +8,7 @@ this_path = os.path.dirname(__file__) sys.path.append(os.path.normpath(os.path.join(this_path, '../'))) -from mongoc_common import * # noqa: E402 +from mongoc_common import mongoc_common_setup # noqa: E402 with open(this_path + '/../../../VERSION_CURRENT') as vc: release = version = vc.read() diff --git a/src/libbson/doc/conf.py b/src/libbson/doc/conf.py index 29f963c4a40..4e9824f626a 100644 --- a/src/libbson/doc/conf.py +++ b/src/libbson/doc/conf.py @@ -7,7 +7,7 @@ this_path = os.path.dirname(__file__) sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) -from mongoc_common import * # noqa: E402 +from mongoc_common import mongoc_common_setup # noqa: E402 extensions = [ 'mongoc', diff --git a/src/libmongoc/doc/conf.py b/src/libmongoc/doc/conf.py index d24193ce31c..148a2a7588c 100644 --- a/src/libmongoc/doc/conf.py +++ b/src/libmongoc/doc/conf.py @@ -32,7 +32,7 @@ sys.path.append(this_path) sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) -from mongoc_common import * # noqa: E402 +from mongoc_common import mongoc_common_setup # noqa: E402 extensions = [ 'mongoc', From dc3663fab391a0639a4d7d5d2990f5884744dd73 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Thu, 11 Sep 2025 09:10:07 -0500 Subject: [PATCH 13/20] Remove `silent: true` for upload-man-docs --- .evergreen/config_generator/components/make_docs.py | 1 - 
.evergreen/generated_configs/functions.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/.evergreen/config_generator/components/make_docs.py b/.evergreen/config_generator/components/make_docs.py index a149261961b..ff9ea6cec20 100644 --- a/.evergreen/config_generator/components/make_docs.py +++ b/.evergreen/config_generator/components/make_docs.py @@ -68,7 +68,6 @@ class UploadManPages(Function): commands = [ bash_exec( working_dir='mongoc', - silent=True, script="""\ set -o errexit # Get "aha", the ANSI HTML Adapter. diff --git a/.evergreen/generated_configs/functions.yml b/.evergreen/generated_configs/functions.yml index 2081883f53c..18e6d46f869 100644 --- a/.evergreen/generated_configs/functions.yml +++ b/.evergreen/generated_configs/functions.yml @@ -737,7 +737,6 @@ functions: params: binary: bash working_dir: mongoc - silent: true args: - -c - | From 8c78737ecc67142e86387796cfde1a13573c838c Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Thu, 11 Sep 2025 09:10:07 -0500 Subject: [PATCH 14/20] Silence F403 and restore `from mongoc_common import *` --- build/sphinx/homepage-config/conf.py | 4 ++-- src/libbson/doc/conf.py | 4 ++-- src/libmongoc/doc/conf.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/build/sphinx/homepage-config/conf.py b/build/sphinx/homepage-config/conf.py index 806a29f33bc..75415195717 100644 --- a/build/sphinx/homepage-config/conf.py +++ b/build/sphinx/homepage-config/conf.py @@ -8,7 +8,7 @@ this_path = os.path.dirname(__file__) sys.path.append(os.path.normpath(os.path.join(this_path, '../'))) -from mongoc_common import mongoc_common_setup # noqa: E402 +from mongoc_common import * # noqa: E402, F403 with open(this_path + '/../../../VERSION_CURRENT') as vc: release = version = vc.read() @@ -46,7 +46,7 @@ def download_link(typ, rawtext, text, lineno, inliner, options={}, content=[]): def setup(app): - mongoc_common_setup(app) + mongoc_common_setup(app) # noqa: F405 app.add_role('download-link', download_link) diff 
--git a/src/libbson/doc/conf.py b/src/libbson/doc/conf.py index 4e9824f626a..82832fcb64d 100644 --- a/src/libbson/doc/conf.py +++ b/src/libbson/doc/conf.py @@ -7,7 +7,7 @@ this_path = os.path.dirname(__file__) sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) -from mongoc_common import mongoc_common_setup # noqa: E402 +from mongoc_common import * # noqa: E402, F403 extensions = [ 'mongoc', @@ -51,5 +51,5 @@ def add_canonical_link(app, pagename, templatename, context, doctree): def setup(app): - mongoc_common_setup(app) + mongoc_common_setup(app) # noqa: F405 app.connect('html-page-context', add_canonical_link) diff --git a/src/libmongoc/doc/conf.py b/src/libmongoc/doc/conf.py index 148a2a7588c..009c33670fe 100644 --- a/src/libmongoc/doc/conf.py +++ b/src/libmongoc/doc/conf.py @@ -32,7 +32,7 @@ sys.path.append(this_path) sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx'))) -from mongoc_common import mongoc_common_setup # noqa: E402 +from mongoc_common import * # noqa: E402, F403 extensions = [ 'mongoc', @@ -273,7 +273,7 @@ def check_html_builder_requirements(app): def setup(app: Sphinx): - mongoc_common_setup(app) + mongoc_common_setup(app) # noqa: F405 app.connect('builder-inited', check_html_builder_requirements) if has_sphinx_design: app.add_directive('ad-dropdown', AdDropdown) From 5bb953ce5eb1c24db82092c055d385ba9ca9af30 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 17 Sep 2025 09:28:26 -0500 Subject: [PATCH 15/20] More formatting and lint tweaks --- .../evergreen_config_lib/functions.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py index 328aa353613..a10015cc273 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py +++ 
b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py @@ -14,7 +14,7 @@ from collections import OrderedDict as OD -from evergreen_config_generator.functions import Function, s3_put, shell_exec +from evergreen_config_generator.functions import Function, s3_put from evergreen_config_lib import shell_mongoc @@ -94,9 +94,7 @@ 'run auth tests', Function( shell_mongoc( - r""" - .evergreen/scripts/run-auth-tests.sh - """, + '.evergreen/scripts/run-auth-tests.sh', add_expansions_to_env=True, ), ), @@ -193,14 +191,16 @@ ( 'link sample program mingw bson', Function( - shell_mongoc(r""" - # Build libmongoc with CMake and compile a program that links - # dynamically to it, using variables from pkg-config.exe. - . .evergreen/scripts/use-tools.sh paths - . .evergreen/scripts/find-cmake-latest.sh - export CMAKE="$(native-path "$(find_cmake_latest)")" - cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd - """), + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically to it, using variables from pkg-config.exe. + . .evergreen/scripts/use-tools.sh paths + . 
.evergreen/scripts/find-cmake-latest.sh + export CMAKE="$(native-path "$(find_cmake_latest)")" + cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd + """, + ), ), ), ( From c2740c20d9dcb45bf4b7b192ff194d4789c39f8f Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 24 Sep 2025 16:27:12 -0500 Subject: [PATCH 16/20] Re-fix indentation of multi-line strings --- .../evergreen_config_lib/functions.py | 190 +++++++++--------- 1 file changed, 97 insertions(+), 93 deletions(-) diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py index 1e7700aaa9c..b8175fcdd1d 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py @@ -26,9 +26,7 @@ 'install ssl', Function( shell_mongoc( - r""" - .evergreen/scripts/install-ssl.sh - """, + '.evergreen/scripts/install-ssl.sh', test=False, add_expansions_to_env=True, ), @@ -39,10 +37,10 @@ Function( shell_mongoc( r""" - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp coverage s3://mciuploads/${project}/%s/coverage/ --recursive --acl public-read --region us-east-1 - """ + export AWS_ACCESS_KEY_ID=${aws_key} + export AWS_SECRET_ACCESS_KEY=${aws_secret} + aws s3 cp coverage s3://mciuploads/${project}/%s/coverage/ --recursive --acl public-read --region us-east-1 + """ % (build_path,), test=False, silent=True, @@ -62,19 +60,21 @@ ( 'upload scan artifacts', Function( - shell_mongoc(r""" - if find scan -name \*.html | grep -q html; then - (cd scan && find . -name index.html -exec echo "
  • {}
  • " \;) >> scan.html - else - echo "No issues found" > scan.html - fi - """), shell_mongoc( r""" - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp scan s3://mciuploads/${project}/%s/scan/ --recursive --acl public-read --region us-east-1 - """ + if find scan -name \*.html | grep -q html; then + (cd scan && find . -name index.html -exec echo "
  • {}
  • " \;) >> scan.html + else + echo "No issues found" > scan.html + fi + """, + ), + shell_mongoc( + r""" + export AWS_ACCESS_KEY_ID=${aws_key} + export AWS_SECRET_ACCESS_KEY=${aws_secret} + aws s3 cp scan s3://mciuploads/${project}/%s/scan/ --recursive --acl public-read --region us-east-1 + """ % (build_path,), test=False, silent=True, @@ -96,9 +96,7 @@ 'run auth tests', Function( shell_mongoc( - r""" - .evergreen/scripts/run-auth-tests.sh - """, + '.evergreen/scripts/run-auth-tests.sh', add_expansions_to_env=True, ), ), @@ -108,15 +106,15 @@ Function( shell_mongoc( r""" - # Compile a program that links dynamically or statically to libmongoc, - # using variables from pkg-config or CMake's find_package command. - export BUILD_SAMPLE_WITH_CMAKE=${BUILD_SAMPLE_WITH_CMAKE} - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - PATH="${UV_INSTALL_DIR}:$PATH" - LINK_STATIC= .evergreen/scripts/link-sample-program.sh - LINK_STATIC=1 .evergreen/scripts/link-sample-program.sh - """, + # Compile a program that links dynamically or statically to libmongoc, + # using variables from pkg-config or CMake's find_package command. + export BUILD_SAMPLE_WITH_CMAKE=${BUILD_SAMPLE_WITH_CMAKE} + export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + PATH="${UV_INSTALL_DIR}:$PATH" + LINK_STATIC= .evergreen/scripts/link-sample-program.sh + LINK_STATIC=1 .evergreen/scripts/link-sample-program.sh + """, include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR'], ), ), @@ -126,14 +124,14 @@ Function( shell_mongoc( r""" - # Compile a program that links dynamically or statically to libbson, - # using variables from pkg-config or from CMake's find_package command. 
- PATH="${UV_INSTALL_DIR}:$PATH" - BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh - """, + # Compile a program that links dynamically or statically to libbson, + # using variables from pkg-config or from CMake's find_package command. + PATH="${UV_INSTALL_DIR}:$PATH" + BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh + """, include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR'], ), ), @@ -141,53 +139,61 @@ ( 'link sample program MSVC', Function( - shell_mongoc(r""" - # Build libmongoc with CMake and compile a program that links - # dynamically or statically to it, using variables from CMake's - # find_package command. - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - PATH="${UV_INSTALL_DIR}:$PATH" - LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd - LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd - """) + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically or statically to it, using variables from CMake's + # find_package command. 
+ export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + PATH="${UV_INSTALL_DIR}:$PATH" + LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd + LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd + """, + ) ), ), ( 'link sample program mingw', Function( - shell_mongoc(r""" - # Build libmongoc with CMake and compile a program that links - # dynamically to it, using variables from pkg-config.exe. - PATH="${UV_INSTALL_DIR}:$PATH" - cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw.cmd - """) + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically to it, using variables from pkg-config.exe. + PATH="${UV_INSTALL_DIR}:$PATH" + cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw.cmd + """, + ) ), ), ( 'link sample program MSVC bson', Function( - shell_mongoc(r""" - # Build libmongoc with CMake and compile a program that links - # dynamically or statically to it, using variables from CMake's - # find_package command. - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - PATH="${UV_INSTALL_DIR}:$PATH" - LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd - LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd - """) + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically or statically to it, using variables from CMake's + # find_package command. 
+ export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + PATH="${UV_INSTALL_DIR}:$PATH" + LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd + LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd + """, + ) ), ), ( 'link sample program mingw bson', Function( - shell_mongoc(r""" - # Build libmongoc with CMake and compile a program that links - # dynamically to it, using variables from pkg-config.exe. - PATH="${UV_INSTALL_DIR}:$PATH" - cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd - """) + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically to it, using variables from pkg-config.exe. + PATH="${UV_INSTALL_DIR}:$PATH" + cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd + """, + ) ), ), ( @@ -195,17 +201,17 @@ Function( shell_mongoc( r""" - # Note: coverage is currently only enabled on the ubuntu1804 distro. - # This script does not support MacOS, Windows, or non-x86_64 distros. - # Update accordingly if code coverage is expanded to other distros. - curl -Os https://uploader.codecov.io/latest/linux/codecov - chmod +x codecov - # -Z: Exit with a non-zero value if error. - # -g: Run with gcov support. - # -t: Codecov upload token. - # perl: filter verbose "Found" list and "Processing" messages. - ./codecov -Zgt "${codecov_token}" | perl -lne 'print if not m|^.*\.gcov(\.\.\.)?$|' - """, + # Note: coverage is currently only enabled on the ubuntu1804 distro. + # This script does not support MacOS, Windows, or non-x86_64 distros. + # Update accordingly if code coverage is expanded to other distros. + curl -Os https://uploader.codecov.io/latest/linux/codecov + chmod +x codecov + # -Z: Exit with a non-zero value if error. + # -g: Run with gcov support. + # -t: Codecov upload token. + # perl: filter verbose "Found" list and "Processing" messages. 
+ ./codecov -Zgt "${codecov_token}" | perl -lne 'print if not m|^.*\.gcov(\.\.\.)?$|' + """, test=False, ), ), @@ -214,9 +220,7 @@ 'compile coverage', Function( shell_mongoc( - r""" - COVERAGE=ON .evergreen/scripts/compile.sh - """, + 'COVERAGE=ON .evergreen/scripts/compile.sh', add_expansions_to_env=True, ), ), @@ -228,19 +232,19 @@ {'command': 'ec2.assume_role', 'params': {'role_arn': '${aws_test_secrets_role}'}}, shell_mongoc( r""" - pushd ../drivers-evergreen-tools/.evergreen/auth_aws - ./setup_secrets.sh drivers/aws_auth - popd # ../drivers-evergreen-tools/.evergreen/auth_aws - """, + pushd ../drivers-evergreen-tools/.evergreen/auth_aws + ./setup_secrets.sh drivers/aws_auth + popd # ../drivers-evergreen-tools/.evergreen/auth_aws + """, include_expansions_in_env=['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN'], ), shell_mongoc( r""" - pushd ../drivers-evergreen-tools/.evergreen/auth_aws - . ./activate-authawsvenv.sh - popd # ../drivers-evergreen-tools/.evergreen/auth_aws - .evergreen/scripts/run-aws-tests.sh - """, + pushd ../drivers-evergreen-tools/.evergreen/auth_aws + . 
./activate-authawsvenv.sh + popd # ../drivers-evergreen-tools/.evergreen/auth_aws + .evergreen/scripts/run-aws-tests.sh + """, include_expansions_in_env=['TESTCASE'], ), ), From eea28d4a9e3ffc1f84350b20e7f24fb33a47f6d4 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 24 Sep 2025 16:27:12 -0500 Subject: [PATCH 17/20] Fix missing import statements --- .evergreen/config_generator/components/openssl_compat.py | 3 ++- .../evergreen_config_lib/testazurekms.py | 1 + .../legacy_config_generator/evergreen_config_lib/testgcpkms.py | 1 + 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.evergreen/config_generator/components/openssl_compat.py b/.evergreen/config_generator/components/openssl_compat.py index f22549218e0..68043171cfe 100644 --- a/.evergreen/config_generator/components/openssl_compat.py +++ b/.evergreen/config_generator/components/openssl_compat.py @@ -1,4 +1,5 @@ -from config_generator.components.funcs.find_cmake_latest import FindCMakeLatest +from itertools import product + from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import EvgCommandType, FunctionCall from shrub.v3.evg_task import EvgTask, EvgTaskRef diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py b/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py index 55021d6eb1f..4d34fb16699 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py @@ -19,6 +19,7 @@ from typing import MutableSequence from evergreen_config_generator.functions import func, shell_exec +from evergreen_config_generator.taskgroups import TaskGroup from evergreen_config_generator.tasks import NamedTask from evergreen_config_generator.variants import Variant diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py index 
359cec9091b..1790636fac9 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py @@ -18,6 +18,7 @@ from typing import MutableSequence from evergreen_config_generator.functions import func, shell_exec +from evergreen_config_generator.taskgroups import TaskGroup from evergreen_config_generator.tasks import NamedTask from evergreen_config_generator.variants import Variant From ca30cc76ebf430b3d56ee4ef5d34ac06e07b1a3f Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Wed, 24 Sep 2025 16:29:48 -0500 Subject: [PATCH 18/20] Re-fix unused import warnings --- .evergreen/config_generator/components/funcs/fetch_det.py | 2 +- .../legacy_config_generator/evergreen_config_lib/tasks.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.evergreen/config_generator/components/funcs/fetch_det.py b/.evergreen/config_generator/components/funcs/fetch_det.py index e06bfe22483..71db4d30fea 100644 --- a/.evergreen/config_generator/components/funcs/fetch_det.py +++ b/.evergreen/config_generator/components/funcs/fetch_det.py @@ -1,4 +1,4 @@ -from shrub.v3.evg_command import EvgCommandType, expansions_update +from shrub.v3.evg_command import EvgCommandType from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py index 888e0d9db00..2a25d88b5cb 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py @@ -16,7 +16,7 @@ from itertools import chain from typing import ClassVar, Iterable, Literal, Mapping, MutableSequence, Optional, Sequence -from evergreen_config_generator import Scalar, Value +from evergreen_config_generator import Value from evergreen_config_generator.functions import func, s3_put from 
evergreen_config_generator.tasks import ( DependencySpec, From d158832f3788e1c1dd70d9baec6728efb9fb72b3 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Thu, 25 Sep 2025 12:53:12 -0500 Subject: [PATCH 19/20] Revert formatting of bottle.py and mongodl.py --- build/bottle.py | 2370 +++++++++++++++++++++------------------------- build/mongodl.py | 314 +++--- 2 files changed, 1270 insertions(+), 1414 deletions(-) diff --git a/build/bottle.py b/build/bottle.py index 4819c3aef63..cb7dd208320 100644 --- a/build/bottle.py +++ b/build/bottle.py @@ -14,7 +14,6 @@ """ from __future__ import print_function - import sys __author__ = 'Marcel Hellkamp' @@ -32,16 +31,18 @@ def _cli_parse(args): # pragma: no coverage from argparse import ArgumentParser - parser = ArgumentParser(prog=args[0], usage='%(prog)s [options] package.module:app') + parser = ArgumentParser(prog=args[0], usage="%(prog)s [options] package.module:app") opt = parser.add_argument - opt('--version', action='store_true', help='show version number.') - opt('-b', '--bind', metavar='ADDRESS', help='bind socket to ADDRESS.') - opt('-s', '--server', default='wsgiref', help='use SERVER as backend.') - opt('-p', '--plugin', action='append', help='install additional plugin/s.') - opt('-c', '--conf', action='append', metavar='FILE', help='load config values from FILE.') - opt('-C', '--param', action='append', metavar='NAME=VALUE', help='override config values.') - opt('--debug', action='store_true', help='start server in debug mode.') - opt('--reload', action='store_true', help='auto-reload on file changes.') + opt("--version", action="store_true", help="show version number.") + opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") + opt("-s", "--server", default='wsgiref', help="use SERVER as backend.") + opt("-p", "--plugin", action="append", help="install additional plugin/s.") + opt("-c", "--conf", action="append", metavar="FILE", + help="load config values from FILE.") + opt("-C", "--param", 
action="append", metavar="NAME=VALUE", + help="override config values.") + opt("--debug", action="store_true", help="start server in debug mode.") + opt("--reload", action="store_true", help="auto-reload on file changes.") opt('app', help='WSGI app entry point.', nargs='?') cli_args = parser.parse_args(args[1:]) @@ -55,11 +56,9 @@ def _cli_patch(cli_args): # pragma: no coverage if opts.server: if opts.server.startswith('gevent'): import gevent.monkey - gevent.monkey.patch_all() elif opts.server.startswith('eventlet'): import eventlet - eventlet.monkey_patch() @@ -70,61 +69,41 @@ def _cli_patch(cli_args): # pragma: no coverage # Imports and Python 2/3 unification ########################################## ############################################################################### -import base64 # noqa: E402 -import calendar # noqa: E402 -import email.utils # noqa: E402 -import functools # noqa: E402 -import hashlib # noqa: E402 -import hmac # noqa: E402 -import itertools # noqa: E402 -import mimetypes # noqa: E402 -import os # noqa: E402 -import re # noqa: E402 -import tempfile # noqa: E402 -import threading # noqa: E402 -import time # noqa: E402 -import warnings # noqa: E402 -import weakref # noqa: E402 -from datetime import date as datedate # noqa: E402 -from datetime import datetime, timedelta # noqa: E402 -from tempfile import NamedTemporaryFile # noqa: E402 -from traceback import format_exc, print_exc # noqa: E402 -from types import FunctionType # noqa: E402 -from unicodedata import normalize # noqa: E402 +import base64, calendar, email.utils, functools, hmac, itertools,\ + mimetypes, os, re, tempfile, threading, time, warnings, weakref, hashlib + +from types import FunctionType +from datetime import date as datedate, datetime, timedelta +from tempfile import NamedTemporaryFile +from traceback import format_exc, print_exc +from unicodedata import normalize try: - from ujson import dumps as json_dumps - from ujson import loads as json_lds + from ujson import 
dumps as json_dumps, loads as json_lds except ImportError: - from json import dumps as json_dumps - from json import loads as json_lds + from json import dumps as json_dumps, loads as json_lds py = sys.version_info py3k = py.major > 2 # Lots of stdlib and builtin differences. if py3k: - import _thread as thread import http.client as httplib - from urllib.parse import SplitResult as UrlSplitResult - from urllib.parse import quote as urlquote - from urllib.parse import unquote as urlunquote - from urllib.parse import urlencode, urljoin - + import _thread as thread + from urllib.parse import urljoin, SplitResult as UrlSplitResult + from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote urlunquote = functools.partial(urlunquote, encoding='latin1') - import configparser - import pickle + from http.cookies import SimpleCookie, Morsel, CookieError from collections.abc import MutableMapping as DictMixin - from datetime import timezone - from http.cookies import CookieError, Morsel, SimpleCookie - from io import BytesIO from types import ModuleType as new_module - + import pickle + from io import BytesIO + import configparser + from datetime import timezone UTC = timezone.utc # getfullargspec was deprecated in 3.5 and un-deprecated in 3.6 # getargspec was deprecated in 3.0 and removed in 3.11 from inspect import getfullargspec - def getargspec(func): spec = getfullargspec(func) kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs) @@ -132,47 +111,32 @@ def getargspec(func): basestring = str unicode = str - - def json_loads(s): - return json_lds(touni(s)) - - def callable(x): - return hasattr(x, '__call__') - + json_loads = lambda s: json_lds(touni(s)) + callable = lambda x: hasattr(x, '__call__') imap = map def _raise(*a): raise a[0](a[1]).with_traceback(a[2]) else: # 2.x - warnings.warn('Python 2 support will be dropped in Bottle 0.14', DeprecationWarning) - from collections import MutableMapping as DictMixin - from datetime import tzinfo - from imp 
import new_module - from inspect import getargspec - from itertools import imap - from urllib import quote as urlquote - from urllib import unquote as urlunquote - from urllib import urlencode - - import ConfigParser as configparser - import cPickle as pickle + warnings.warn("Python 2 support will be dropped in Bottle 0.14", DeprecationWarning) import httplib import thread - from Cookie import CookieError, Morsel, SimpleCookie + from urlparse import urljoin, SplitResult as UrlSplitResult + from urllib import urlencode, quote as urlquote, unquote as urlunquote + from Cookie import SimpleCookie, Morsel, CookieError + from itertools import imap + import cPickle as pickle + from imp import new_module from StringIO import StringIO as BytesIO - from urlparse import SplitResult as UrlSplitResult - from urlparse import urljoin + import ConfigParser as configparser + from collections import MutableMapping as DictMixin + from inspect import getargspec + from datetime import tzinfo class _UTC(tzinfo): - def utcoffset(self, dt): - return timedelta(0) - - def tzname(self, dt): - return 'UTC' - - def dst(self, dt): - return timedelta(0) - + def utcoffset(self, dt): return timedelta(0) + def tzname(self, dt): return "UTC" + def dst(self, dt): return timedelta(0) UTC = _UTC() unicode = unicode @@ -180,7 +144,6 @@ def dst(self, dt): exec(compile('def _raise(*a): raise a[0], a[1], a[2]', '', 'exec')) - # Some helpers for string/byte handling def tob(s, enc='utf8'): if isinstance(s, unicode): @@ -191,7 +154,7 @@ def tob(s, enc='utf8'): def touni(s, enc='utf8', err='strict'): if isinstance(s, bytes): return s.decode(enc, err) - return unicode('' if s is None else s) + return unicode("" if s is None else s) tonat = touni if py3k else tob @@ -201,7 +164,7 @@ def _stderr(*args): try: print(*args, file=sys.stderr) except (IOError, AttributeError): - pass # Some environments do not allow printing (mod_wsgi) + pass # Some environments do not allow printing (mod_wsgi) # A bug in functools 
causes it to break if the wrapper is an instance method @@ -211,18 +174,14 @@ def update_wrapper(wrapper, wrapped, *a, **ka): except AttributeError: pass - # These helpers are used at module level and need to be defined first. # And yes, I know PEP-8, but sometimes a lower-case classname makes more sense. def depr(major, minor, cause, fix, stacklevel=3): - text = 'Warning: Use of deprecated feature or API. (Deprecated in Bottle-%d.%d)\nCause: %s\nFix: %s\n' % ( - major, - minor, - cause, - fix, - ) + text = "Warning: Use of deprecated feature or API. (Deprecated in Bottle-%d.%d)\n"\ + "Cause: %s\n"\ + "Fix: %s\n" % (major, minor, cause, fix) if DEBUG == 'strict': raise DeprecationWarning(text) warnings.warn(text, DeprecationWarning, stacklevel=stacklevel) @@ -239,7 +198,7 @@ def makelist(data): # This is just too handy class DictProperty(object): - """Property that maps to a key in a local dict-like attribute.""" + """ Property that maps to a key in a local dict-like attribute. """ def __init__(self, attr, key=None, read_only=False): self.attr, self.key, self.read_only = attr, key, read_only @@ -250,42 +209,37 @@ def __call__(self, func): return self def __get__(self, obj, cls): - if obj is None: - return self + if obj is None: return self key, storage = self.key, getattr(obj, self.attr) - if key not in storage: - storage[key] = self.getter(obj) + if key not in storage: storage[key] = self.getter(obj) return storage[key] def __set__(self, obj, value): - if self.read_only: - raise AttributeError('Read-Only property.') + if self.read_only: raise AttributeError("Read-Only property.") getattr(obj, self.attr)[self.key] = value def __delete__(self, obj): - if self.read_only: - raise AttributeError('Read-Only property.') + if self.read_only: raise AttributeError("Read-Only property.") del getattr(obj, self.attr)[self.key] class cached_property(object): - """A property that is only computed once per instance and then replaces - itself with an ordinary attribute. 
Deleting the attribute resets the - property.""" + """ A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. """ def __init__(self, func): update_wrapper(self, func) self.func = func def __get__(self, obj, cls): - if obj is None: - return self + if obj is None: return self value = obj.__dict__[self.func.__name__] = self.func(obj) return value class lazy_attribute(object): - """A property that caches itself to the class object.""" + """ A property that caches itself to the class object. """ def __init__(self, func): functools.update_wrapper(self, func, updated=[]) @@ -303,57 +257,55 @@ def __get__(self, obj, cls): class BottleException(Exception): - """A base class for exceptions used by bottle.""" - + """ A base class for exceptions used by bottle. """ pass - ############################################################################### # Routing ###################################################################### ############################################################################### class RouteError(BottleException): - """This is a base class for all routing related exceptions""" + """ This is a base class for all routing related exceptions """ class RouteReset(BottleException): - """If raised by a plugin or request handler, the route is reset and all - plugins are re-applied.""" + """ If raised by a plugin or request handler, the route is reset and all + plugins are re-applied. """ class RouterUnknownModeError(RouteError): + pass class RouteSyntaxError(RouteError): - """The route parser found something not supported by this router.""" + """ The route parser found something not supported by this router. """ class RouteBuildError(RouteError): - """The route could not be built.""" + """ The route could not be built. 
""" def _re_flatten(p): - """Turn all capturing groups in a regular expression pattern into - non-capturing groups.""" + """ Turn all capturing groups in a regular expression pattern into + non-capturing groups. """ if '(' not in p: return p - return re.sub( - r'(\\*)(\(\?P<[^>]+>|\((?!\?))', lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p - ) + return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))', lambda m: m.group(0) if + len(m.group(1)) % 2 else m.group(1) + '(?:', p) class Router(object): - """A Router is an ordered collection of route->target pairs. It is used to - efficiently match WSGI requests against a number of routes and return - the first target that satisfies the request. The target may be anything, - usually a string, ID or callable object. A route consists of a path-rule - and a HTTP method. - - The path-rule is either a static path (e.g. `/contact`) or a dynamic - path that contains wildcards (e.g. `/wiki/`). The wildcard syntax - and details on the matching order are described in docs:`routing`. + """ A Router is an ordered collection of route->target pairs. It is used to + efficiently match WSGI requests against a number of routes and return + the first target that satisfies the request. The target may be anything, + usually a string, ID or callable object. A route consists of a path-rule + and a HTTP method. + + The path-rule is either a static path (e.g. `/contact`) or a dynamic + path that contains wildcards (e.g. `/wiki/`). The wildcard syntax + and details on the matching order are described in docs:`routing`. """ default_pattern = '[^/]+' @@ -373,34 +325,35 @@ def __init__(self, strict=False): #: If true, static routes are no longer checked first. 
self.strict_order = strict self.filters = { - 're': lambda conf: (_re_flatten(conf or self.default_pattern), None, None), + 're': lambda conf: (_re_flatten(conf or self.default_pattern), + None, None), 'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))), 'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))), - 'path': lambda conf: (r'.+?', None, None), + 'path': lambda conf: (r'.+?', None, None) } def add_filter(self, name, func): - """Add a filter. The provided function is called with the configuration + """ Add a filter. The provided function is called with the configuration string as parameter and must return a (regexp, to_python, to_url) tuple. - The first element is a string, the last two are callables or None.""" + The first element is a string, the last two are callables or None. """ self.filters[name] = func - rule_syntax = re.compile( - '(\\\\*)' + rule_syntax = re.compile('(\\\\*)' '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)' - '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)' - '(?::((?:\\\\.|[^\\\\>])+)?)?)?>))' - ) + '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)' + '(?::((?:\\\\.|[^\\\\>])+)?)?)?>))') def _itertokens(self, rule): offset, prefix = 0, '' for match in self.rule_syntax.finditer(rule): - prefix += rule[offset : match.start()] + prefix += rule[offset:match.start()] g = match.groups() if g[2] is not None: - depr(0, 13, 'Use of old route syntax.', 'Use instead of :name in routes.', stacklevel=4) + depr(0, 13, "Use of old route syntax.", + "Use instead of :name in routes.", + stacklevel=4) if len(g[0]) % 2: # Escaped wildcard - prefix += match.group(0)[len(g[0]) :] + prefix += match.group(0)[len(g[0]):] offset = match.end() continue if prefix: @@ -412,7 +365,7 @@ def _itertokens(self, rule): yield prefix + rule[offset:], None, None def add(self, rule, method, target, name=None): - """Add a new rule or replace the target for an existing rule.""" + """ Add a new rule or replace the target for an existing rule. 
""" anons = 0 # Number of anonymous wildcards found keys = [] # Names of keys pattern = '' # Regular expression pattern with named groups @@ -423,8 +376,7 @@ def add(self, rule, method, target, name=None): for key, mode, conf in self._itertokens(rule): if mode: is_static = False - if mode == 'default': - mode = self.default_filter + if mode == 'default': mode = self.default_filter mask, in_filter, out_filter = self.filters[mode](conf) if not key: pattern += '(?:%s)' % mask @@ -433,16 +385,14 @@ def add(self, rule, method, target, name=None): else: pattern += '(?P<%s>%s)' % (key, mask) keys.append(key) - if in_filter: - filters.append((key, in_filter)) + if in_filter: filters.append((key, in_filter)) builder.append((key, out_filter or str)) elif key: pattern += re.escape(key) builder.append((None, key)) self.builder[rule] = builder - if name: - self.builder[name] = builder + if name: self.builder[name] = builder if is_static and not self.strict_order: self.static.setdefault(method, {}) @@ -453,7 +403,7 @@ def add(self, rule, method, target, name=None): re_pattern = re.compile('^(%s)$' % pattern) re_match = re_pattern.match except re.error as e: - raise RouteSyntaxError('Could not add Route: %s (%s)' % (rule, e)) + raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, e)) if filters: @@ -479,7 +429,8 @@ def getargs(path): if DEBUG: msg = 'Route <%s %s> overwrites a previously defined route' warnings.warn(msg % (method, rule), RuntimeWarning, stacklevel=3) - self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule + self.dyna_routes[method][ + self._groups[flatpat, method]] = whole_rule else: self.dyna_routes.setdefault(method, []).append(whole_rule) self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1 @@ -491,7 +442,7 @@ def _compile(self, method): comborules = self.dyna_regexes[method] = [] maxgroups = self._MAX_GROUPS_PER_PATTERN for x in range(0, len(all_rules), maxgroups): - some = all_rules[x : x + maxgroups] + some = 
all_rules[x:x + maxgroups] combined = (flatpat for (_, flatpat, _, _) in some) combined = '|'.join('(^%s$)' % flatpat for flatpat in combined) combined = re.compile(combined).match @@ -499,10 +450,10 @@ def _compile(self, method): comborules.append((combined, rules)) def build(self, _name, *anons, **query): - """Build an URL by filling the wildcards in a rule.""" + """ Build an URL by filling the wildcards in a rule. """ builder = self.builder.get(_name) if not builder: - raise RouteBuildError('No route with that name.', _name) + raise RouteBuildError("No route with that name.", _name) try: for i, value in enumerate(anons): query['anon%d' % i] = value @@ -512,7 +463,7 @@ def build(self, _name, *anons, **query): raise RouteBuildError('Missing URL argument: %r' % E.args[0]) def match(self, environ): - """Return a (target, url_args) tuple or raise HTTPError(400/404/405).""" + """ Return a (target, url_args) tuple or raise HTTPError(400/404/405). """ verb = environ['REQUEST_METHOD'].upper() path = environ['PATH_INFO'] or '/' @@ -541,20 +492,23 @@ def match(self, environ): if match: allowed.add(method) if allowed: - allow_header = ','.join(sorted(allowed)) - raise HTTPError(405, 'Method not allowed.', Allow=allow_header) + allow_header = ",".join(sorted(allowed)) + raise HTTPError(405, "Method not allowed.", Allow=allow_header) # No matching route and no alternative method found. We give up - raise HTTPError(404, 'Not found: ' + repr(path)) + raise HTTPError(404, "Not found: " + repr(path)) class Route(object): - """This class wraps a route callback along with route specific metadata and - configuration and applies Plugins on demand. It is also responsible for - turning an URL path rule into a regular expression usable by the Router. + """ This class wraps a route callback along with route specific metadata and + configuration and applies Plugins on demand. It is also responsible for + turning an URL path rule into a regular expression usable by the Router. 
""" - def __init__(self, app, rule, method, callback, name=None, plugins=None, skiplist=None, **config): + def __init__(self, app, rule, method, callback, + name=None, + plugins=None, + skiplist=None, **config): #: The application this route is installed to. self.app = app #: The path-rule string (e.g. ``/wiki/``). @@ -577,32 +531,28 @@ def __init__(self, app, rule, method, callback, name=None, plugins=None, skiplis @cached_property def call(self): - """The route callback with all plugins applied. This property is - created on demand and then cached to speed up subsequent requests.""" + """ The route callback with all plugins applied. This property is + created on demand and then cached to speed up subsequent requests.""" return self._make_callback() def reset(self): - """Forget any cached values. The next time :attr:`call` is accessed, - all plugins are re-applied.""" + """ Forget any cached values. The next time :attr:`call` is accessed, + all plugins are re-applied. """ self.__dict__.pop('call', None) def prepare(self): - """Do all on-demand work immediately (useful for debugging).""" + """ Do all on-demand work immediately (useful for debugging).""" self.call def all_plugins(self): - """Yield all Plugins affecting this route.""" + """ Yield all Plugins affecting this route. """ unique = set() for p in reversed(self.app.plugins + self.plugins): - if True in self.skiplist: - break + if True in self.skiplist: break name = getattr(p, 'name', False) - if name and (name in self.skiplist or name in unique): - continue - if p in self.skiplist or type(p) in self.skiplist: - continue - if name: - unique.add(name) + if name and (name in self.skiplist or name in unique): continue + if p in self.skiplist or type(p) in self.skiplist: continue + if name: unique.add(name) yield p def _make_callback(self): @@ -620,8 +570,8 @@ def _make_callback(self): return callback def get_undecorated_callback(self): - """Return the callback. 
If the callback is a decorated function, try to - recover the original function.""" + """ Return the callback. If the callback is a decorated function, try to + recover the original function. """ func = self.callback func = getattr(func, '__func__' if py3k else 'im_func', func) closure_attr = '__closure__' if py3k else 'func_closure' @@ -632,45 +582,41 @@ def get_undecorated_callback(self): # in case of decorators with multiple arguments if not isinstance(func, FunctionType): # pick first FunctionType instance from multiple arguments - func = filter(lambda x: isinstance(x, FunctionType), map(lambda x: x.cell_contents, attributes)) + func = filter(lambda x: isinstance(x, FunctionType), + map(lambda x: x.cell_contents, attributes)) func = list(func)[0] # py3 support return func def get_callback_args(self): - """Return a list of argument names the callback (most likely) accepts - as keyword arguments. If the callback is a decorated function, try - to recover the original function before inspection.""" + """ Return a list of argument names the callback (most likely) accepts + as keyword arguments. If the callback is a decorated function, try + to recover the original function before inspection. """ return getargspec(self.get_undecorated_callback())[0] def get_config(self, key, default=None): - """Lookup a config field and return its value, first checking the - route.config, then route.app.config.""" - depr( - 0, - 13, - 'Route.get_config() is deprecated.', - 'The Route.config property already includes values from the' - ' application config for missing keys. Access it directly.', - ) + """ Lookup a config field and return its value, first checking the + route.config, then route.app.config.""" + depr(0, 13, "Route.get_config() is deprecated.", + "The Route.config property already includes values from the" + " application config for missing keys. 
Access it directly.") return self.config.get(key, default) def __repr__(self): cb = self.get_undecorated_callback() return '<%s %s -> %s:%s>' % (self.method, self.rule, cb.__module__, cb.__name__) - ############################################################################### # Application Object ########################################################### ############################################################################### class Bottle(object): - """Each Bottle object represents a single, distinct web application and - consists of routes, callbacks, plugins, resources and configuration. - Instances are callable WSGI applications. + """ Each Bottle object represents a single, distinct web application and + consists of routes, callbacks, plugins, resources and configuration. + Instances are callable WSGI applications. - :param catchall: If true (default), handle all exceptions. Turn off to - let debugging middleware handle exceptions. + :param catchall: If true (default), handle all exceptions. Turn off to + let debugging middleware handle exceptions. """ @lazy_attribute @@ -682,25 +628,22 @@ def _global_config(cls): def __init__(self, **kwargs): #: A :class:`ConfigDict` for app specific configuration. self.config = self._global_config._make_overlay() - self.config._add_change_listener(functools.partial(self.trigger_hook, 'config')) + self.config._add_change_listener( + functools.partial(self.trigger_hook, 'config')) - self.config.update({'catchall': True}) + self.config.update({ + "catchall": True + }) if kwargs.get('catchall') is False: - depr( - 0, - 13, - 'Bottle(catchall) keyword argument.', - "The 'catchall' setting is now part of the app configuration. Fix: `app.config['catchall'] = False`", - ) + depr(0, 13, "Bottle(catchall) keyword argument.", + "The 'catchall' setting is now part of the app " + "configuration. 
Fix: `app.config['catchall'] = False`") self.config['catchall'] = False if kwargs.get('autojson') is False: - depr( - 0, - 13, - 'Bottle(autojson) keyword argument.', - "The 'autojson' setting is now part of the app configuration. Fix: `app.config['json.enable'] = False`", - ) + depr(0, 13, "Bottle(autojson) keyword argument.", + "The 'autojson' setting is now part of the app " + "configuration. Fix: `app.config['json.enable'] = False`") self.config['json.disable'] = True self._mounts = [] @@ -728,15 +671,15 @@ def _hooks(self): return dict((name, []) for name in self.__hook_names) def add_hook(self, name, func): - """Attach a callback to a hook. Three hooks are currently implemented: - - before_request - Executed once before each request. The request context is - available, but no routing has happened yet. - after_request - Executed once after each request regardless of its outcome. - app_reset - Called whenever :meth:`Bottle.reset` is called. + """ Attach a callback to a hook. Three hooks are currently implemented: + + before_request + Executed once before each request. The request context is + available, but no routing has happened yet. + after_request + Executed once after each request regardless of its outcome. + app_reset + Called whenever :meth:`Bottle.reset` is called. """ if name in self.__hook_reversed: self._hooks[name].insert(0, func) @@ -744,18 +687,18 @@ def add_hook(self, name, func): self._hooks[name].append(func) def remove_hook(self, name, func): - """Remove a callback from a hook.""" + """ Remove a callback from a hook. """ if name in self._hooks and func in self._hooks[name]: self._hooks[name].remove(func) return True def trigger_hook(self, __name, *args, **kwargs): - """Trigger a hook and return a list of results.""" + """ Trigger a hook and return a list of results. """ return [hook(*args, **kwargs) for hook in self._hooks[__name][:]] def hook(self, name): - """Return a decorator that attaches a callback to a hook. 
See - :meth:`add_hook` for details.""" + """ Return a decorator that attaches a callback to a hook. See + :meth:`add_hook` for details.""" def decorator(func): self.add_hook(name, func) @@ -782,7 +725,8 @@ def start_response(status, headerlist, exc_info=None): # follow PEP-3333 (which requires latin1) or used a # pre-encoding other than utf8 :/ status = status.encode('latin1').decode('utf8') - headerlist = [(k, v.encode('latin1').decode('utf8')) for (k, v) in headerlist] + headerlist = [(k, v.encode('latin1').decode('utf8')) + for (k, v) in headerlist] rs.status = status for name, value in headerlist: rs.add_header(name, value) @@ -805,30 +749,18 @@ def start_response(status, headerlist, exc_info=None): def _mount_app(self, prefix, app, **options): if app in self._mounts or '_mount.app' in app.config: - depr( - 0, - 13, - 'Application mounted multiple times. Falling back to WSGI mount.', - 'Clone application before mounting to a different location.', - ) + depr(0, 13, "Application mounted multiple times. Falling back to WSGI mount.", + "Clone application before mounting to a different location.") return self._mount_wsgi(prefix, app, **options) if options: - depr( - 0, - 13, - 'Unsupported mount options. Falling back to WSGI mount.', - 'Do not specify any route options when mounting bottle application.', - ) + depr(0, 13, "Unsupported mount options. Falling back to WSGI mount.", + "Do not specify any route options when mounting bottle application.") return self._mount_wsgi(prefix, app, **options) - if not prefix.endswith('/'): - depr( - 0, - 13, - "Prefix must end in '/'. Falling back to WSGI mount.", - "Consider adding an explicit redirect from '/prefix' to '/prefix/' in the parent application.", - ) + if not prefix.endswith("/"): + depr(0, 13, "Prefix must end in '/'. 
Falling back to WSGI mount.", + "Consider adding an explicit redirect from '/prefix' to '/prefix/' in the parent application.") return self._mount_wsgi(prefix, app, **options) self._mounts.append(app) @@ -839,25 +771,25 @@ def _mount_app(self, prefix, app, **options): self.add_route(route) def mount(self, prefix, app, **options): - """Mount an application (:class:`Bottle` or plain WSGI) to a specific - URL prefix. Example:: + """ Mount an application (:class:`Bottle` or plain WSGI) to a specific + URL prefix. Example:: - parent_app.mount('/prefix/', child_app) + parent_app.mount('/prefix/', child_app) - :param prefix: path prefix or `mount-point`. - :param app: an instance of :class:`Bottle` or a WSGI application. + :param prefix: path prefix or `mount-point`. + :param app: an instance of :class:`Bottle` or a WSGI application. - Plugins from the parent application are not applied to the routes - of the mounted child application. If you need plugins in the child - application, install them separately. + Plugins from the parent application are not applied to the routes + of the mounted child application. If you need plugins in the child + application, install them separately. - While it is possible to use path wildcards within the prefix path - (:class:`Bottle` childs only), it is highly discouraged. + While it is possible to use path wildcards within the prefix path + (:class:`Bottle` childs only), it is highly discouraged. - The prefix path must end with a slash. If you want to access the - root of the child application via `/prefix` in addition to - `/prefix/`, consider adding a route with a 307 redirect to the - parent application. + The prefix path must end with a slash. If you want to access the + root of the child application via `/prefix` in addition to + `/prefix/`, consider adding a route with a 307 redirect to the + parent application. 
""" if not prefix.startswith('/'): @@ -869,54 +801,49 @@ def mount(self, prefix, app, **options): return self._mount_wsgi(prefix, app, **options) def merge(self, routes): - """Merge the routes of another :class:`Bottle` application or a list of - :class:`Route` objects into this application. The routes keep their - 'owner', meaning that the :data:`Route.app` attribute is not - changed.""" + """ Merge the routes of another :class:`Bottle` application or a list of + :class:`Route` objects into this application. The routes keep their + 'owner', meaning that the :data:`Route.app` attribute is not + changed. """ if isinstance(routes, Bottle): routes = routes.routes for route in routes: self.add_route(route) def install(self, plugin): - """Add a plugin to the list of plugins and prepare it for being - applied to all routes of this application. A plugin may be a simple - decorator or an object that implements the :class:`Plugin` API. + """ Add a plugin to the list of plugins and prepare it for being + applied to all routes of this application. A plugin may be a simple + decorator or an object that implements the :class:`Plugin` API. """ - if hasattr(plugin, 'setup'): - plugin.setup(self) + if hasattr(plugin, 'setup'): plugin.setup(self) if not callable(plugin) and not hasattr(plugin, 'apply'): - raise TypeError('Plugins must be callable or implement .apply()') + raise TypeError("Plugins must be callable or implement .apply()") self.plugins.append(plugin) self.reset() return plugin def uninstall(self, plugin): - """Uninstall plugins. Pass an instance to remove a specific plugin, a type - object to remove all plugins that match that type, a string to remove - all plugins with a matching ``name`` attribute or ``True`` to remove all - plugins. Return the list of removed plugins.""" + """ Uninstall plugins. 
Pass an instance to remove a specific plugin, a type + object to remove all plugins that match that type, a string to remove + all plugins with a matching ``name`` attribute or ``True`` to remove all + plugins. Return the list of removed plugins. """ removed, remove = [], plugin for i, plugin in list(enumerate(self.plugins))[::-1]: - if remove is True or remove is plugin or remove is type(plugin) or getattr(plugin, 'name', True) == remove: + if remove is True or remove is plugin or remove is type(plugin) \ + or getattr(plugin, 'name', True) == remove: removed.append(plugin) del self.plugins[i] - if hasattr(plugin, 'close'): - plugin.close() - if removed: - self.reset() + if hasattr(plugin, 'close'): plugin.close() + if removed: self.reset() return removed def reset(self, route=None): - """Reset all routes (force plugins to be re-applied) and clear all - caches. If an ID or route object is given, only that specific route - is affected.""" - if route is None: - routes = self.routes - elif isinstance(route, Route): - routes = [route] - else: - routes = [self.routes[route]] + """ Reset all routes (force plugins to be re-applied) and clear all + caches. If an ID or route object is given, only that specific route + is affected. """ + if route is None: routes = self.routes + elif isinstance(route, Route): routes = [route] + else: routes = [self.routes[route]] for route in routes: route.reset() if DEBUG: @@ -925,116 +852,120 @@ def reset(self, route=None): self.trigger_hook('app_reset') def close(self): - """Close the application and all installed plugins.""" + """ Close the application and all installed plugins. """ for plugin in self.plugins: - if hasattr(plugin, 'close'): - plugin.close() + if hasattr(plugin, 'close'): plugin.close() def run(self, **kwargs): - """Calls :func:`run` with the same parameters.""" + """ Calls :func:`run` with the same parameters. 
""" run(self, **kwargs) def match(self, environ): - """Search for a matching route and return a (:class:`Route`, urlargs) - tuple. The second value is a dictionary with parameters extracted - from the URL. Raise :exc:`HTTPError` (404/405) on a non-match.""" + """ Search for a matching route and return a (:class:`Route`, urlargs) + tuple. The second value is a dictionary with parameters extracted + from the URL. Raise :exc:`HTTPError` (404/405) on a non-match.""" return self.router.match(environ) def get_url(self, routename, **kargs): - """Return a string that matches a named route""" + """ Return a string that matches a named route """ scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/' location = self.router.build(routename, **kargs).lstrip('/') return urljoin(urljoin('/', scriptname), location) def add_route(self, route): - """Add a route object, but do not change the :data:`Route.app` - attribute.""" + """ Add a route object, but do not change the :data:`Route.app` + attribute.""" self.routes.append(route) self.router.add(route.rule, route.method, route, name=route.name) - if DEBUG: - route.prepare() - - def route(self, path=None, method='GET', callback=None, name=None, apply=None, skip=None, **config): - """A decorator to bind a function to a request URL. Example:: - - @app.route('/hello/') - def hello(name): - return 'Hello %s' % name - - The ```` part is a wildcard. See :class:`Router` for syntax - details. - - :param path: Request path or a list of paths to listen to. If no - path is specified, it is automatically generated from the - signature of the function. - :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of - methods to listen to. (default: `GET`) - :param callback: An optional shortcut to avoid the decorator - syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` - :param name: The name for this route. (default: None) - :param apply: A decorator or plugin or a list of plugins. 
These are - applied to the route callback in addition to installed plugins. - :param skip: A list of plugins, plugin classes or names. Matching - plugins are not installed to this route. ``True`` skips all. - - Any additional keyword arguments are stored as route-specific - configuration and passed to plugins (see :meth:`Plugin.apply`). + if DEBUG: route.prepare() + + def route(self, + path=None, + method='GET', + callback=None, + name=None, + apply=None, + skip=None, **config): + """ A decorator to bind a function to a request URL. Example:: + + @app.route('/hello/') + def hello(name): + return 'Hello %s' % name + + The ```` part is a wildcard. See :class:`Router` for syntax + details. + + :param path: Request path or a list of paths to listen to. If no + path is specified, it is automatically generated from the + signature of the function. + :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of + methods to listen to. (default: `GET`) + :param callback: An optional shortcut to avoid the decorator + syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` + :param name: The name for this route. (default: None) + :param apply: A decorator or plugin or a list of plugins. These are + applied to the route callback in addition to installed plugins. + :param skip: A list of plugins, plugin classes or names. Matching + plugins are not installed to this route. ``True`` skips all. + + Any additional keyword arguments are stored as route-specific + configuration and passed to plugins (see :meth:`Plugin.apply`). 
""" - if callable(path): - path, callback = None, path + if callable(path): path, callback = None, path plugins = makelist(apply) skiplist = makelist(skip) def decorator(callback): - if isinstance(callback, basestring): - callback = load(callback) + if isinstance(callback, basestring): callback = load(callback) for rule in makelist(path) or yieldroutes(callback): for verb in makelist(method): verb = verb.upper() - route = Route(self, rule, verb, callback, name=name, plugins=plugins, skiplist=skiplist, **config) + route = Route(self, rule, verb, callback, + name=name, + plugins=plugins, + skiplist=skiplist, **config) self.add_route(route) return callback return decorator(callback) if callback else decorator def get(self, path=None, method='GET', **options): - """Equals :meth:`route`.""" + """ Equals :meth:`route`. """ return self.route(path, method, **options) def post(self, path=None, method='POST', **options): - """Equals :meth:`route` with a ``POST`` method parameter.""" + """ Equals :meth:`route` with a ``POST`` method parameter. """ return self.route(path, method, **options) def put(self, path=None, method='PUT', **options): - """Equals :meth:`route` with a ``PUT`` method parameter.""" + """ Equals :meth:`route` with a ``PUT`` method parameter. """ return self.route(path, method, **options) def delete(self, path=None, method='DELETE', **options): - """Equals :meth:`route` with a ``DELETE`` method parameter.""" + """ Equals :meth:`route` with a ``DELETE`` method parameter. """ return self.route(path, method, **options) def patch(self, path=None, method='PATCH', **options): - """Equals :meth:`route` with a ``PATCH`` method parameter.""" + """ Equals :meth:`route` with a ``PATCH`` method parameter. """ return self.route(path, method, **options) def error(self, code=500, callback=None): - """Register an output handler for a HTTP error code. Can - be used as a decorator or called directly :: + """ Register an output handler for a HTTP error code. 
Can + be used as a decorator or called directly :: - def error_handler_500(error): - return 'error_handler_500' + def error_handler_500(error): + return 'error_handler_500' - app.error(code=500, callback=error_handler_500) + app.error(code=500, callback=error_handler_500) - @app.error(404) - def error_handler_404(error): - return 'error_handler_404' + @app.error(404) + def error_handler_404(error): + return 'error_handler_404' """ def decorator(callback): - if isinstance(callback, basestring): - callback = load(callback) + if isinstance(callback, basestring): callback = load(callback) self.error_handler[int(code)] = callback return callback @@ -1053,7 +984,7 @@ def _handle(self, environ): response.bind() try: - while True: # Remove in 0.14 together with RouteReset + while True: # Remove in 0.14 together with RouteReset out = None try: self.trigger_hook('before_request') @@ -1067,12 +998,9 @@ def _handle(self, environ): out = E break except RouteReset: - depr( - 0, - 13, - 'RouteReset exception deprecated', - 'Call route.call() after route.reset() and return the result.', - ) + depr(0, 13, "RouteReset exception deprecated", + "Call route.call() after route.reset() and " + "return the result.") route.reset() continue finally: @@ -1086,19 +1014,18 @@ def _handle(self, environ): except (KeyboardInterrupt, SystemExit, MemoryError): raise except Exception as E: - if not self.catchall: - raise + if not self.catchall: raise stacktrace = format_exc() environ['wsgi.errors'].write(stacktrace) environ['wsgi.errors'].flush() environ['bottle.exc_info'] = sys.exc_info() - out = HTTPError(500, 'Internal Server Error', E, stacktrace) + out = HTTPError(500, "Internal Server Error", E, stacktrace) out.apply(response) return out def _cast(self, out, peek=None): - """Try to convert the parameter into something WSGI compatible and set + """ Try to convert the parameter into something WSGI compatible and set correct HTTP headers when possible. 
Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like, iterable of strings and iterable of unicodes @@ -1110,7 +1037,8 @@ def _cast(self, out, peek=None): response['Content-Length'] = 0 return [] # Join lists of byte or unicode strings. Mixed lists are NOT supported - if isinstance(out, (tuple, list)) and isinstance(out[0], (bytes, unicode)): + if isinstance(out, (tuple, list))\ + and isinstance(out[0], (bytes, unicode)): out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' # Encode unicode strings if isinstance(out, unicode): @@ -1124,7 +1052,8 @@ def _cast(self, out, peek=None): # TODO: Handle these explicitly in handle() or make them iterable. if isinstance(out, HTTPError): out.apply(response) - out = self.error_handler.get(out.status_code, self.default_error_handler)(out) + out = self.error_handler.get(out.status_code, + self.default_error_handler)(out) return self._cast(out) if isinstance(out, HTTPResponse): out.apply(response) @@ -1150,8 +1079,7 @@ def _cast(self, out, peek=None): except (KeyboardInterrupt, SystemExit, MemoryError): raise except Exception as error: - if not self.catchall: - raise + if not self.catchall: raise first = HTTPError(500, 'Unhandled exception', error, format_exc()) # These are the inner types allowed in iterator or generator objects. @@ -1160,10 +1088,7 @@ def _cast(self, out, peek=None): elif isinstance(first, bytes): new_iter = itertools.chain([first], iout) elif isinstance(first, unicode): - - def encoder(x): - return x.encode(response.charset) - + encoder = lambda x: x.encode(response.charset) new_iter = imap(encoder, itertools.chain([first], iout)) else: msg = 'Unsupported response type: %s' % type(first) @@ -1173,13 +1098,13 @@ def encoder(x): return new_iter def wsgi(self, environ, start_response): - """The bottle WSGI-interface.""" + """ The bottle WSGI-interface. 
""" try: out = self._cast(self._handle(environ)) # rfc2616 section 4.3 - if response._status_code in (100, 101, 204, 304) or environ['REQUEST_METHOD'] == 'HEAD': - if hasattr(out, 'close'): - out.close() + if response._status_code in (100, 101, 204, 304)\ + or environ['REQUEST_METHOD'] == 'HEAD': + if hasattr(out, 'close'): out.close() out = [] exc_info = environ.get('bottle.exc_info') if exc_info is not None: @@ -1189,14 +1114,13 @@ def wsgi(self, environ, start_response): except (KeyboardInterrupt, SystemExit, MemoryError): raise except Exception as E: - if not self.catchall: - raise - err = '

    Critical error while processing request: %s

    ' % html_escape(environ.get('PATH_INFO', '/')) + if not self.catchall: raise + err = '

    Critical error while processing request: %s

    ' \ + % html_escape(environ.get('PATH_INFO', '/')) if DEBUG: - err += '

    Error:

    \n
    \n%s\n
    \n

    Traceback:

    \n
    \n%s\n
    \n' % ( - html_escape(repr(E)), - html_escape(format_exc()), - ) + err += '

    Error:

    \n
    \n%s\n
    \n' \ + '

    Traceback:

    \n
    \n%s\n
    \n' \ + % (html_escape(repr(E)), html_escape(format_exc())) environ['wsgi.errors'].write(err) environ['wsgi.errors'].flush() headers = [('Content-Type', 'text/html; charset=UTF-8')] @@ -1204,11 +1128,11 @@ def wsgi(self, environ, start_response): return [tob(err)] def __call__(self, environ, start_response): - """Each instance of :class:'Bottle' is a WSGI application.""" + """ Each instance of :class:'Bottle' is a WSGI application. """ return self.wsgi(environ, start_response) def __enter__(self): - """Use this application as default for all module-level shortcuts.""" + """ Use this application as default for all module-level shortcuts. """ default_app.push(self) return self @@ -1217,31 +1141,30 @@ def __exit__(self, exc_type, exc_value, traceback): def __setattr__(self, name, value): if name in self.__dict__: - raise AttributeError('Attribute %s already defined. Plugin conflict?' % name) + raise AttributeError("Attribute %s already defined. Plugin conflict?" % name) object.__setattr__(self, name, value) - ############################################################################### # HTTP and WSGI Tools ########################################################## ############################################################################### class BaseRequest(object): - """A wrapper for WSGI environment dictionaries that adds a lot of - convenient access methods and properties. Most of them are read-only. + """ A wrapper for WSGI environment dictionaries that adds a lot of + convenient access methods and properties. Most of them are read-only. - Adding new attributes to a request actually adds them to the environ - dictionary (as 'bottle.request.ext.'). This is the recommended - way to store and access request-specific data. + Adding new attributes to a request actually adds them to the environ + dictionary (as 'bottle.request.ext.'). This is the recommended + way to store and access request-specific data. 
""" - __slots__ = ('environ',) + __slots__ = ('environ', ) #: Maximum size of memory buffer for :attr:`body` in bytes. MEMFILE_MAX = 102400 def __init__(self, environ=None): - """Wrap a WSGI environ dictionary.""" + """ Wrap a WSGI environ dictionary. """ #: The wrapped WSGI environ dictionary. This is the only real attribute. #: All other attributes actually are read-only properties. self.environ = {} if environ is None else environ @@ -1249,52 +1172,52 @@ def __init__(self, environ=None): @DictProperty('environ', 'bottle.app', read_only=True) def app(self): - """Bottle application handling this request.""" + """ Bottle application handling this request. """ raise RuntimeError('This request is not connected to an application.') @DictProperty('environ', 'bottle.route', read_only=True) def route(self): - """The bottle :class:`Route` object that matches this request.""" + """ The bottle :class:`Route` object that matches this request. """ raise RuntimeError('This request is not connected to a route.') @DictProperty('environ', 'route.url_args', read_only=True) def url_args(self): - """The arguments extracted from the URL.""" + """ The arguments extracted from the URL. """ raise RuntimeError('This request is not connected to a route.') @property def path(self): - """The value of ``PATH_INFO`` with exactly one prefixed slash (to fix - broken clients and avoid the "empty path" edge case).""" + """ The value of ``PATH_INFO`` with exactly one prefixed slash (to fix + broken clients and avoid the "empty path" edge case). """ return '/' + self.environ.get('PATH_INFO', '').lstrip('/') @property def method(self): - """The ``REQUEST_METHOD`` value as an uppercase string.""" + """ The ``REQUEST_METHOD`` value as an uppercase string. 
""" return self.environ.get('REQUEST_METHOD', 'GET').upper() @DictProperty('environ', 'bottle.request.headers', read_only=True) def headers(self): - """A :class:`WSGIHeaderDict` that provides case-insensitive access to - HTTP request headers.""" + """ A :class:`WSGIHeaderDict` that provides case-insensitive access to + HTTP request headers. """ return WSGIHeaderDict(self.environ) def get_header(self, name, default=None): - """Return the value of a request header, or a given default value.""" + """ Return the value of a request header, or a given default value. """ return self.headers.get(name, default) @DictProperty('environ', 'bottle.request.cookies', read_only=True) def cookies(self): - """Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT - decoded. Use :meth:`get_cookie` if you expect signed cookies.""" + """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT + decoded. Use :meth:`get_cookie` if you expect signed cookies. """ cookies = SimpleCookie(self.environ.get('HTTP_COOKIE', '')).values() return FormsDict((c.key, c.value) for c in cookies) def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256): - """Return the content of a cookie. To read a `Signed Cookie`, the - `secret` must match the one used to create the cookie (see - :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing - cookie or wrong signature), return a default value.""" + """ Return the content of a cookie. To read a `Signed Cookie`, the + `secret` must match the one used to create the cookie (see + :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing + cookie or wrong signature), return a default value. """ value = self.cookies.get(key) if secret: # See BaseResponse.set_cookie for details on signed cookies. 
@@ -1310,10 +1233,10 @@ def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256): @DictProperty('environ', 'bottle.request.query', read_only=True) def query(self): - """The :attr:`query_string` parsed into a :class:`FormsDict`. These - values are sometimes called "URL arguments" or "GET parameters", but - not to be confused with "URL wildcards" as they are provided by the - :class:`Router`.""" + """ The :attr:`query_string` parsed into a :class:`FormsDict`. These + values are sometimes called "URL arguments" or "GET parameters", but + not to be confused with "URL wildcards" as they are provided by the + :class:`Router`. """ get = self.environ['bottle.get'] = FormsDict() pairs = _parse_qsl(self.environ.get('QUERY_STRING', '')) for key, value in pairs: @@ -1322,10 +1245,10 @@ def query(self): @DictProperty('environ', 'bottle.request.forms', read_only=True) def forms(self): - """Form values parsed from an `url-encoded` or `multipart/form-data` - encoded POST or PUT request body. The result is returned as a - :class:`FormsDict`. All keys and values are strings. File uploads - are stored separately in :attr:`files`.""" + """ Form values parsed from an `url-encoded` or `multipart/form-data` + encoded POST or PUT request body. The result is returned as a + :class:`FormsDict`. All keys and values are strings. File uploads + are stored separately in :attr:`files`. """ forms = FormsDict() forms.recode_unicode = self.POST.recode_unicode for name, item in self.POST.allitems(): @@ -1335,8 +1258,8 @@ def forms(self): @DictProperty('environ', 'bottle.request.params', read_only=True) def params(self): - """A :class:`FormsDict` with the combined values of :attr:`query` and - :attr:`forms`. File uploads are stored in :attr:`files`.""" + """ A :class:`FormsDict` with the combined values of :attr:`query` and + :attr:`forms`. File uploads are stored in :attr:`files`. 
""" params = FormsDict() for key, value in self.query.allitems(): params[key] = value @@ -1346,8 +1269,8 @@ def params(self): @DictProperty('environ', 'bottle.request.files', read_only=True) def files(self): - """File uploads parsed from `multipart/form-data` encoded POST or PUT - request body. The values are instances of :class:`FileUpload`. + """ File uploads parsed from `multipart/form-data` encoded POST or PUT + request body. The values are instances of :class:`FileUpload`. """ files = FormsDict() @@ -1359,11 +1282,11 @@ def files(self): @DictProperty('environ', 'bottle.request.json', read_only=True) def json(self): - """If the ``Content-Type`` header is ``application/json`` or - ``application/json-rpc``, this property holds the parsed content - of the request body. Only requests smaller than :attr:`MEMFILE_MAX` - are processed to avoid memory exhaustion. - Invalid JSON raises a 400 error response. + """ If the ``Content-Type`` header is ``application/json`` or + ``application/json-rpc``, this property holds the parsed content + of the request body. Only requests smaller than :attr:`MEMFILE_MAX` + are processed to avoid memory exhaustion. + Invalid JSON raises a 400 error response. 
""" ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0] if ctype in ('application/json', 'application/json-rpc'): @@ -1380,8 +1303,7 @@ def _iter_body(self, read, bufsize): maxread = max(0, self.content_length) while maxread: part = read(min(maxread, bufsize)) - if not part: - break + if not part: break yield part maxread -= len(part) @@ -1394,24 +1316,20 @@ def _iter_chunked(read, bufsize): while header[-2:] != rn: c = read(1) header += c - if not c: - raise err - if len(header) > bufsize: - raise err + if not c: raise err + if len(header) > bufsize: raise err size, _, _ = header.partition(sem) try: maxread = int(tonat(size.strip()), 16) except ValueError: raise err - if maxread == 0: - break + if maxread == 0: break buff = bs while maxread > 0: if not buff: buff = read(min(maxread, bufsize)) part, buff = buff[:maxread], buff[maxread:] - if not part: - raise err + if not part: raise err yield part maxread -= len(part) if read(2) != rn: @@ -1439,8 +1357,8 @@ def _body(self): return body def _get_body_string(self, maxread): - """Read body into a string. Raise HTTPError(413) on requests that are - too large.""" + """ Read body into a string. Raise HTTPError(413) on requests that are + too large. """ if self.content_length > maxread: raise HTTPError(413, 'Request entity too large') data = self.body.read(maxread + 1) @@ -1450,27 +1368,28 @@ def _get_body_string(self, maxread): @property def body(self): - """The HTTP request body as a seek-able file-like object. Depending on - :attr:`MEMFILE_MAX`, this is either a temporary file or a - :class:`io.BytesIO` instance. Accessing this property for the first - time reads and replaces the ``wsgi.input`` environ variable. - Subsequent accesses just do a `seek(0)` on the file object.""" + """ The HTTP request body as a seek-able file-like object. Depending on + :attr:`MEMFILE_MAX`, this is either a temporary file or a + :class:`io.BytesIO` instance. 
Accessing this property for the first + time reads and replaces the ``wsgi.input`` environ variable. + Subsequent accesses just do a `seek(0)` on the file object. """ self._body.seek(0) return self._body @property def chunked(self): - """True if Chunked transfer encoding was.""" - return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower() + """ True if Chunked transfer encoding was. """ + return 'chunked' in self.environ.get( + 'HTTP_TRANSFER_ENCODING', '').lower() #: An alias for :attr:`query`. GET = query @DictProperty('environ', 'bottle.request.post', read_only=True) def POST(self): - """The values of :attr:`forms` and :attr:`files` combined into a single - :class:`FormsDict`. Values are either strings (form values) or - instances of :class:`FileUpload`. + """ The values of :attr:`forms` and :attr:`files` combined into a single + :class:`FormsDict`. Values are either strings (form values) or + instances of :class:`FileUpload`. """ post = FormsDict() content_type = self.environ.get('CONTENT_TYPE', '') @@ -1484,43 +1403,40 @@ def POST(self): return post post.recode_unicode = False - charset = options.get('charset', 'utf8') - boundary = options.get('boundary') + charset = options.get("charset", "utf8") + boundary = options.get("boundary") if not boundary: - raise MultipartError('Invalid content type header, missing boundary') - parser = _MultipartParser( - self.body, - boundary, - self.content_length, - mem_limit=self.MEMFILE_MAX, - memfile_limit=self.MEMFILE_MAX, - charset=charset, - ) + raise MultipartError("Invalid content type header, missing boundary") + parser = _MultipartParser(self.body, boundary, self.content_length, + mem_limit=self.MEMFILE_MAX, memfile_limit=self.MEMFILE_MAX, + charset=charset) for part in parser.parse(): if not part.filename and part.is_buffered(): post[part.name] = tonat(part.value, 'utf8') else: - post[part.name] = FileUpload(part.file, part.name, part.filename, part.headerlist) + post[part.name] = 
FileUpload(part.file, part.name, + part.filename, part.headerlist) return post @property def url(self): - """The full request URI including hostname and scheme. If your app - lives behind a reverse proxy or load balancer and you get confusing - results, make sure that the ``X-Forwarded-Host`` header is set - correctly.""" + """ The full request URI including hostname and scheme. If your app + lives behind a reverse proxy or load balancer and you get confusing + results, make sure that the ``X-Forwarded-Host`` header is set + correctly. """ return self.urlparts.geturl() @DictProperty('environ', 'bottle.request.urlparts', read_only=True) def urlparts(self): - """The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. - The tuple contains (scheme, host, path, query_string and fragment), - but the fragment is always empty because it is not visible to the - server.""" + """ The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. + The tuple contains (scheme, host, path, query_string and fragment), + but the fragment is always empty because it is not visible to the + server. """ env = self.environ - http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http') + http = env.get('HTTP_X_FORWARDED_PROTO') \ + or env.get('wsgi.url_scheme', 'http') host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') if not host: # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. @@ -1533,96 +1449,93 @@ def urlparts(self): @property def fullpath(self): - """Request path including :attr:`script_name` (if present).""" + """ Request path including :attr:`script_name` (if present). """ return urljoin(self.script_name, self.path.lstrip('/')) @property def query_string(self): - """The raw :attr:`query` part of the URL (everything in between ``?`` - and ``#``) as a string.""" + """ The raw :attr:`query` part of the URL (everything in between ``?`` + and ``#``) as a string. 
""" return self.environ.get('QUERY_STRING', '') @property def script_name(self): - """The initial portion of the URL's `path` that was removed by a higher - level (server or routing middleware) before the application was - called. This script path is returned with leading and tailing - slashes.""" + """ The initial portion of the URL's `path` that was removed by a higher + level (server or routing middleware) before the application was + called. This script path is returned with leading and tailing + slashes. """ script_name = self.environ.get('SCRIPT_NAME', '').strip('/') return '/' + script_name + '/' if script_name else '/' def path_shift(self, shift=1): - """Shift path segments from :attr:`path` to :attr:`script_name` and - vice versa. + """ Shift path segments from :attr:`path` to :attr:`script_name` and + vice versa. - :param shift: The number of path segments to shift. May be negative - to change the shift direction. (default: 1) + :param shift: The number of path segments to shift. May be negative + to change the shift direction. (default: 1) """ script, path = path_shift(self.environ.get('SCRIPT_NAME', '/'), self.path, shift) self['SCRIPT_NAME'], self['PATH_INFO'] = script, path @property def content_length(self): - """The request body length as an integer. The client is responsible to - set this header. Otherwise, the real length of the body is unknown - and -1 is returned. In this case, :attr:`body` will be empty.""" + """ The request body length as an integer. The client is responsible to + set this header. Otherwise, the real length of the body is unknown + and -1 is returned. In this case, :attr:`body` will be empty. """ return int(self.environ.get('CONTENT_LENGTH') or -1) @property def content_type(self): - """The Content-Type header as a lowercase-string (default: empty).""" + """ The Content-Type header as a lowercase-string (default: empty). 
""" return self.environ.get('CONTENT_TYPE', '').lower() @property def is_xhr(self): - """True if the request was triggered by a XMLHttpRequest. This only - works with JavaScript libraries that support the `X-Requested-With` - header (most of the popular libraries do).""" + """ True if the request was triggered by a XMLHttpRequest. This only + works with JavaScript libraries that support the `X-Requested-With` + header (most of the popular libraries do). """ requested_with = self.environ.get('HTTP_X_REQUESTED_WITH', '') return requested_with.lower() == 'xmlhttprequest' @property def is_ajax(self): - """Alias for :attr:`is_xhr`. "Ajax" is not the right term.""" + """ Alias for :attr:`is_xhr`. "Ajax" is not the right term. """ return self.is_xhr @property def auth(self): - """HTTP authentication data as a (user, password) tuple. This - implementation currently supports basic (not digest) authentication - only. If the authentication happened at a higher level (e.g. in the - front web-server or a middleware), the password field is None, but - the user field is looked up from the ``REMOTE_USER`` environ - variable. On any errors, None is returned.""" + """ HTTP authentication data as a (user, password) tuple. This + implementation currently supports basic (not digest) authentication + only. If the authentication happened at a higher level (e.g. in the + front web-server or a middleware), the password field is None, but + the user field is looked up from the ``REMOTE_USER`` environ + variable. On any errors, None is returned. """ basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION', '')) - if basic: - return basic + if basic: return basic ruser = self.environ.get('REMOTE_USER') - if ruser: - return (ruser, None) + if ruser: return (ruser, None) return None @property def remote_route(self): - """A list of all IPs that were involved in this request, starting with - the client IP and followed by zero or more proxies. 
This does only - work if all proxies support the ```X-Forwarded-For`` header. Note - that this information can be forged by malicious clients.""" + """ A list of all IPs that were involved in this request, starting with + the client IP and followed by zero or more proxies. This does only + work if all proxies support the ```X-Forwarded-For`` header. Note + that this information can be forged by malicious clients. """ proxy = self.environ.get('HTTP_X_FORWARDED_FOR') - if proxy: - return [ip.strip() for ip in proxy.split(',')] + if proxy: return [ip.strip() for ip in proxy.split(',')] remote = self.environ.get('REMOTE_ADDR') return [remote] if remote else [] @property def remote_addr(self): - """The client IP as a string. Note that this information can be forged - by malicious clients.""" + """ The client IP as a string. Note that this information can be forged + by malicious clients. """ route = self.remote_route return route[0] if route else None def copy(self): - """Return a new :class:`Request` with a shallow :attr:`environ` copy.""" + """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """ return Request(self.environ.copy()) def get(self, value, default=None): @@ -1632,8 +1545,8 @@ def __getitem__(self, key): return self.environ[key] def __delitem__(self, key): - self[key] = '' - del self.environ[key] + self[key] = "" + del (self.environ[key]) def __iter__(self): return iter(self.environ) @@ -1645,7 +1558,7 @@ def keys(self): return self.environ.keys() def __setitem__(self, key, value): - """Change an environ value and clear all caches that depend on it.""" + """ Change an environ value and clear all caches that depend on it. 
""" if self.environ.get('bottle.request.readonly'): raise KeyError('The environ dictionary is read-only.') @@ -1667,7 +1580,7 @@ def __repr__(self): return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url) def __getattr__(self, name): - """Search in self.environ for additional user defined attributes.""" + """ Search in self.environ for additional user defined attributes. """ try: var = self.environ['bottle.request.ext.%s' % name] return var.__get__(self) if hasattr(var, '__get__') else var @@ -1675,31 +1588,30 @@ def __getattr__(self, name): raise AttributeError('Attribute %r not defined.' % name) def __setattr__(self, name, value): - """Define new attributes that are local to the bound request environment.""" - if name == 'environ': - return object.__setattr__(self, name, value) + """ Define new attributes that are local to the bound request environment. """ + if name == 'environ': return object.__setattr__(self, name, value) key = 'bottle.request.ext.%s' % name if hasattr(self, name): - raise AttributeError('Attribute already defined: %s' % name) + raise AttributeError("Attribute already defined: %s" % name) self.environ[key] = value def __delattr__(self, name): try: del self.environ['bottle.request.ext.%s' % name] except KeyError: - raise AttributeError('Attribute not defined: %s' % name) + raise AttributeError("Attribute not defined: %s" % name) def _hkey(key): if '\n' in key or '\r' in key or '\0' in key: - raise ValueError('Header names must not contain control characters: %r' % key) + raise ValueError("Header names must not contain control characters: %r" % key) return key.title().replace('_', '-') def _hval(value): value = tonat(value) if '\n' in value or '\r' in value or '\0' in value: - raise ValueError('Header value must not contain control characters: %r' % value) + raise ValueError("Header value must not contain control characters: %r" % value) return value @@ -1710,8 +1622,7 @@ def __init__(self, name, reader=None, writer=None, 
default=''): self.__doc__ = 'Current value of the %r header.' % name.title() def __get__(self, obj, _): - if obj is None: - return self + if obj is None: return self value = obj.get_header(self.name, self.default) return self.reader(value) if self.reader else value @@ -1723,11 +1634,11 @@ def __delete__(self, obj): class BaseResponse(object): - """Storage class for a response body as well as headers and cookies. + """ Storage class for a response body as well as headers and cookies. - This class does support dict-like case-insensitive item-access to - headers, but is NOT a dict. Most notably, iterating over a response - yields parts of the body and not the headers. + This class does support dict-like case-insensitive item-access to + headers, but is NOT a dict. Most notably, iterating over a response + yields parts of the body and not the headers. """ default_status = 200 @@ -1737,22 +1648,13 @@ class BaseResponse(object): # (rfc2616 section 10.2.3 and 10.3.5) bad_headers = { 204: frozenset(('Content-Type', 'Content-Length')), - 304: frozenset( - ( - 'Allow', - 'Content-Encoding', - 'Content-Language', - 'Content-Length', - 'Content-Range', - 'Content-Type', - 'Content-Md5', - 'Last-Modified', - ) - ), + 304: frozenset(('Allow', 'Content-Encoding', 'Content-Language', + 'Content-Length', 'Content-Range', 'Content-Type', + 'Content-Md5', 'Last-Modified')) } def __init__(self, body='', status=None, headers=None, **more_headers): - """Create a new response object. + """ Create a new response object. :param body: The response body as one of the supported types. :param status: Either an HTTP status code (e.g. 200) or a status line @@ -1776,7 +1678,7 @@ def __init__(self, body='', status=None, headers=None, **more_headers): self.add_header(name, value) def copy(self, cls=None): - """Returns a copy of self.""" + """ Returns a copy of self. 
""" cls = cls or BaseResponse assert issubclass(cls, BaseResponse) copy = cls() @@ -1784,9 +1686,9 @@ def copy(self, cls=None): copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) if self._cookies: cookies = copy._cookies = SimpleCookie() - for k, v in self._cookies.items(): + for k,v in self._cookies.items(): cookies[k] = v.value - cookies[k].update(v) # also copy cookie attributes + cookies[k].update(v) # also copy cookie attributes return copy def __iter__(self): @@ -1798,12 +1700,12 @@ def close(self): @property def status_line(self): - """The HTTP status line as a string (e.g. ``404 Not Found``).""" + """ The HTTP status line as a string (e.g. ``404 Not Found``).""" return self._status_line @property def status_code(self): - """The HTTP status code as an integer (e.g. 404).""" + """ The HTTP status code as an integer (e.g. 404).""" return self._status_code def _set_status(self, status): @@ -1825,21 +1727,18 @@ def _get_status(self): return self._status_line status = property( - _get_status, - _set_status, - None, - """ A writeable property to change the HTTP response status. It accepts + _get_status, _set_status, None, + ''' A writeable property to change the HTTP response status. It accepts either a numeric code (100-999) or a string with a custom reason phrase (e.g. "404 Brain not found"). Both :data:`status_line` and :data:`status_code` are updated accordingly. The return value is - always a status string. """, - ) + always a status string. ''') del _get_status, _set_status @property def headers(self): - """An instance of :class:`HeaderDict`, a case-insensitive dict-like - view on the response headers.""" + """ An instance of :class:`HeaderDict`, a case-insensitive dict-like + view on the response headers. 
""" hdict = HeaderDict() hdict.dict = self._headers return hdict @@ -1857,33 +1756,33 @@ def __setitem__(self, name, value): self._headers[_hkey(name)] = [_hval(value)] def get_header(self, name, default=None): - """Return the value of a previously defined header. If there is no - header with that name, return a default value.""" + """ Return the value of a previously defined header. If there is no + header with that name, return a default value. """ return self._headers.get(_hkey(name), [default])[-1] def set_header(self, name, value): - """Create a new response header, replacing any previously defined - headers with the same name.""" + """ Create a new response header, replacing any previously defined + headers with the same name. """ self._headers[_hkey(name)] = [_hval(value)] def add_header(self, name, value): - """Add an additional response header, not removing duplicates.""" + """ Add an additional response header, not removing duplicates. """ self._headers.setdefault(_hkey(name), []).append(_hval(value)) def iter_headers(self): - """Yield (header, value) tuples, skipping headers that are not - allowed with the current response status code.""" + """ Yield (header, value) tuples, skipping headers that are not + allowed with the current response status code. """ return self.headerlist def _wsgi_status_line(self): - """WSGI conform status line (latin1-encodeable)""" + """ WSGI conform status line (latin1-encodeable) """ if py3k: return self._status_line.encode('utf8').decode('latin1') return self._status_line @property def headerlist(self): - """WSGI conform list of (header, value) tuples.""" + """ WSGI conform list of (header, value) tuples. 
""" out = [] headers = list(self._headers.items()) if 'Content-Type' not in self._headers: @@ -1902,55 +1801,56 @@ def headerlist(self): content_type = HeaderProperty('Content-Type') content_length = HeaderProperty('Content-Length', reader=int, default=-1) expires = HeaderProperty( - 'Expires', reader=lambda x: datetime.fromtimestamp(parse_date(x), UTC), writer=lambda x: http_date(x) - ) + 'Expires', + reader=lambda x: datetime.fromtimestamp(parse_date(x), UTC), + writer=lambda x: http_date(x)) @property def charset(self, default='UTF-8'): - """Return the charset specified in the content-type header (default: utf8).""" + """ Return the charset specified in the content-type header (default: utf8). """ if 'charset=' in self.content_type: return self.content_type.split('charset=')[-1].split(';')[0].strip() return default def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **options): - """Create a new cookie or replace an old one. If the `secret` parameter is - set, create a `Signed Cookie` (described below). - - :param name: the name of the cookie. - :param value: the value of the cookie. - :param secret: a signature key required for signed cookies. - - Additionally, this method accepts all RFC 2109 attributes that are - supported by :class:`cookie.Morsel`, including: - - :param maxage: maximum age in seconds. (default: None) - :param expires: a datetime object or UNIX timestamp. (default: None) - :param domain: the domain that is allowed to read the cookie. - (default: current domain) - :param path: limits the cookie to a given path (default: current path) - :param secure: limit the cookie to HTTPS connections (default: off). - :param httponly: prevents client-side javascript to read this cookie - (default: off, requires Python 2.6 or newer). - :param samesite: Control or disable third-party use for this cookie. - Possible values: `lax`, `strict` or `none` (default). 
- - If neither `expires` nor `maxage` is set (default), the cookie will - expire at the end of the browser session (as soon as the browser - window is closed). - - Signed cookies may store any pickle-able object and are - cryptographically signed to prevent manipulation. Keep in mind that - cookies are limited to 4kb in most browsers. - - Warning: Pickle is a potentially dangerous format. If an attacker - gains access to the secret key, he could forge cookies that execute - code on server side if unpickled. Using pickle is discouraged and - support for it will be removed in later versions of bottle. - - Warning: Signed cookies are not encrypted (the client can still see - the content) and not copy-protected (the client can restore an old - cookie). The main intention is to make pickling and unpickling - save, not to store secret information at client side. + """ Create a new cookie or replace an old one. If the `secret` parameter is + set, create a `Signed Cookie` (described below). + + :param name: the name of the cookie. + :param value: the value of the cookie. + :param secret: a signature key required for signed cookies. + + Additionally, this method accepts all RFC 2109 attributes that are + supported by :class:`cookie.Morsel`, including: + + :param maxage: maximum age in seconds. (default: None) + :param expires: a datetime object or UNIX timestamp. (default: None) + :param domain: the domain that is allowed to read the cookie. + (default: current domain) + :param path: limits the cookie to a given path (default: current path) + :param secure: limit the cookie to HTTPS connections (default: off). + :param httponly: prevents client-side javascript to read this cookie + (default: off, requires Python 2.6 or newer). + :param samesite: Control or disable third-party use for this cookie. + Possible values: `lax`, `strict` or `none` (default). 
+ + If neither `expires` nor `maxage` is set (default), the cookie will + expire at the end of the browser session (as soon as the browser + window is closed). + + Signed cookies may store any pickle-able object and are + cryptographically signed to prevent manipulation. Keep in mind that + cookies are limited to 4kb in most browsers. + + Warning: Pickle is a potentially dangerous format. If an attacker + gains access to the secret key, he could forge cookies that execute + code on server side if unpickled. Using pickle is discouraged and + support for it will be removed in later versions of bottle. + + Warning: Signed cookies are not encrypted (the client can still see + the content) and not copy-protected (the client can restore an old + cookie). The main intention is to make pickling and unpickling + save, not to store secret information at client side. """ if not self._cookies: self._cookies = SimpleCookie() @@ -1962,14 +1862,12 @@ def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **optio if secret: if not isinstance(value, basestring): - depr( - 0, - 13, - 'Pickling of arbitrary objects into cookies is deprecated.', - 'Only store strings in cookies. JSON strings are fine, too.', - ) + depr(0, 13, "Pickling of arbitrary objects into cookies is " + "deprecated.", "Only store strings in cookies. 
" + "JSON strings are fine, too.") encoded = base64.b64encode(pickle.dumps([name, value], -1)) - sig = base64.b64encode(hmac.new(tob(secret), encoded, digestmod=digestmod).digest()) + sig = base64.b64encode(hmac.new(tob(secret), encoded, + digestmod=digestmod).digest()) value = touni(tob('!') + sig + tob('?') + encoded) elif not isinstance(value, basestring): raise TypeError('Secret key required for non-string cookies.') @@ -1981,23 +1879,23 @@ def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **optio self._cookies[name] = value for key, value in options.items(): - if key in ('max_age', 'maxage'): # 'maxage' variant added in 0.13 + if key in ('max_age', 'maxage'): # 'maxage' variant added in 0.13 key = 'max-age' if isinstance(value, timedelta): value = value.seconds + value.days * 24 * 3600 if key == 'expires': value = http_date(value) - if key in ('same_site', 'samesite'): # 'samesite' variant added in 0.13 - key, value = 'samesite', (value or 'none').lower() + if key in ('same_site', 'samesite'): # 'samesite' variant added in 0.13 + key, value = 'samesite', (value or "none").lower() if value not in ('lax', 'strict', 'none'): - raise CookieError('Invalid value for SameSite') + raise CookieError("Invalid value for SameSite") if key in ('secure', 'httponly') and not value: continue self._cookies[name][key] = value def delete_cookie(self, key, **kwargs): - """Delete a cookie. Be sure to use the same `domain` and `path` - settings as used to create the cookie.""" + """ Delete a cookie. Be sure to use the same `domain` and `path` + settings as used to create the cookie. 
""" kwargs['max_age'] = -1 kwargs['expires'] = 0 self.set_cookie(key, '', **kwargs) @@ -2016,7 +1914,7 @@ def fget(_): try: return ls.var except AttributeError: - raise RuntimeError('Request context not initialized.') + raise RuntimeError("Request context not initialized.") def fset(_, value): ls.var = value @@ -2028,23 +1926,21 @@ def fdel(_): class LocalRequest(BaseRequest): - """A thread-local subclass of :class:`BaseRequest` with a different - set of attributes for each thread. There is usually only one global - instance of this class (:data:`request`). If accessed during a - request/response cycle, this instance always refers to the *current* - request (even on a multithreaded server).""" - + """ A thread-local subclass of :class:`BaseRequest` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`request`). If accessed during a + request/response cycle, this instance always refers to the *current* + request (even on a multithreaded server). """ bind = BaseRequest.__init__ environ = _local_property() class LocalResponse(BaseResponse): - """A thread-local subclass of :class:`BaseResponse` with a different - set of attributes for each thread. There is usually only one global - instance of this class (:data:`response`). Its attributes are used - to build the HTTP response at the end of the request/response cycle. + """ A thread-local subclass of :class:`BaseResponse` with a different + set of attributes for each thread. There is usually only one global + instance of this class (:data:`response`). Its attributes are used + to build the HTTP response at the end of the request/response cycle. 
""" - bind = BaseResponse.__init__ _status_line = _local_property() _status_code = _local_property() @@ -2058,18 +1954,18 @@ class LocalResponse(BaseResponse): class HTTPResponse(Response, BottleException): - """A subclass of :class:`Response` that can be raised or returned from request - handlers to short-curcuit request processing and override changes made to the - global :data:`request` object. This bypasses error handlers, even if the status - code indicates an error. Return or raise :class:`HTTPError` to trigger error - handlers. + """ A subclass of :class:`Response` that can be raised or returned from request + handlers to short-curcuit request processing and override changes made to the + global :data:`request` object. This bypasses error handlers, even if the status + code indicates an error. Return or raise :class:`HTTPError` to trigger error + handlers. """ def __init__(self, body='', status=None, headers=None, **more_headers): super(HTTPResponse, self).__init__(body, status, headers, **more_headers) def apply(self, other): - """Copy the state of this response to a different :class:`Response` object.""" + """ Copy the state of this response to a different :class:`Response` object. """ other._status_code = self._status_code other._status_line = self._status_line other._headers = self._headers @@ -2078,16 +1974,19 @@ def apply(self, other): class HTTPError(HTTPResponse): - """A subclass of :class:`HTTPResponse` that triggers error handlers.""" + """ A subclass of :class:`HTTPResponse` that triggers error handlers. 
""" default_status = 500 - def __init__(self, status=None, body=None, exception=None, traceback=None, **more_headers): + def __init__(self, + status=None, + body=None, + exception=None, + traceback=None, **more_headers): self.exception = exception self.traceback = traceback super(HTTPError, self).__init__(body, status, **more_headers) - ############################################################################### # Plugins ###################################################################### ############################################################################### @@ -2105,25 +2004,20 @@ def __init__(self, json_dumps=json_dumps): self.json_dumps = json_dumps def setup(self, app): - app.config._define( - 'json.enable', default=True, validate=bool, help='Enable or disable automatic dict->json filter.' - ) - app.config._define( - 'json.ascii', default=False, validate=bool, help='Use only 7-bit ASCII characters in output.' - ) - app.config._define( - 'json.indent', default=True, validate=bool, help='Add whitespace to make json more readable.' - ) - app.config._define( - 'json.dump_func', - default=None, - help='If defined, use this function to transform dict into json. The other options no longer apply.', - ) + app.config._define('json.enable', default=True, validate=bool, + help="Enable or disable automatic dict->json filter.") + app.config._define('json.ascii', default=False, validate=bool, + help="Use only 7-bit ASCII characters in output.") + app.config._define('json.indent', default=True, validate=bool, + help="Add whitespace to make json more readable.") + app.config._define('json.dump_func', default=None, + help="If defined, use this function to transform" + " dict into json. 
The other options no longer" + " apply.") def apply(self, callback, route): dumps = self.json_dumps - if not self.json_dumps: - return callback + if not self.json_dumps: return callback @functools.wraps(callback) def wrapper(*a, **ka): @@ -2133,9 +2027,9 @@ def wrapper(*a, **ka): rv = resp if isinstance(rv, dict): - # Attempt to serialize, raises exception on failure + #Attempt to serialize, raises exception on failure json_response = dumps(rv) - # Set content type only if serialization successful + #Set content type only if serialization successful response.content_type = 'application/json' return json_response elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict): @@ -2147,11 +2041,10 @@ def wrapper(*a, **ka): class TemplatePlugin(object): - """This plugin applies the :func:`view` decorator to all routes with a - `template` config parameter. If the parameter is a tuple, the second - element must be a dict with additional options (e.g. `template_engine`) - or default variables for the template.""" - + """ This plugin applies the :func:`view` decorator to all routes with a + `template` config parameter. If the parameter is a tuple, the second + element must be a dict with additional options (e.g. `template_engine`) + or default variables for the template. """ name = 'template' api = 2 @@ -2171,38 +2064,37 @@ def apply(self, callback, route): #: Not a plugin, but part of the plugin API. TODO: Find a better place. class _ImportRedirect(object): def __init__(self, name, impmask): - """Create a virtual package that redirects imports (see PEP 302).""" + """ Create a virtual package that redirects imports (see PEP 302). 
""" self.name = name self.impmask = impmask self.module = sys.modules.setdefault(name, new_module(name)) - self.module.__dict__.update({'__file__': __file__, '__path__': [], '__all__': [], '__loader__': self}) + self.module.__dict__.update({ + '__file__': __file__, + '__path__': [], + '__all__': [], + '__loader__': self + }) sys.meta_path.append(self) def find_spec(self, fullname, path, target=None): - if '.' not in fullname: - return - if fullname.rsplit('.', 1)[0] != self.name: - return + if '.' not in fullname: return + if fullname.rsplit('.', 1)[0] != self.name: return from importlib.util import spec_from_loader - return spec_from_loader(fullname, self) def find_module(self, fullname, path=None): - if '.' not in fullname: - return - if fullname.rsplit('.', 1)[0] != self.name: - return + if '.' not in fullname: return + if fullname.rsplit('.', 1)[0] != self.name: return return self def create_module(self, spec): return self.load_module(spec.name) def exec_module(self, module): - pass # This probably breaks importlib.reload() :/ + pass # This probably breaks importlib.reload() :/ def load_module(self, fullname): - if fullname in sys.modules: - return sys.modules[fullname] + if fullname in sys.modules: return sys.modules[fullname] modname = fullname.rsplit('.', 1)[1] realname = self.impmask % modname __import__(realname) @@ -2211,16 +2103,15 @@ def load_module(self, fullname): module.__loader__ = self return module - ############################################################################### # Common Utilities ############################################################# ############################################################################### class MultiDict(DictMixin): - """This dict stores multiple values per key, but behaves exactly like a - normal dict in that it returns only the newest value for any given key. - There are special methods available to access the full list of values. 
+ """ This dict stores multiple values per key, but behaves exactly like a + normal dict in that it returns only the newest value for any given key. + There are special methods available to access the full list of values. """ def __init__(self, *a, **k): @@ -2287,14 +2178,14 @@ def allitems(self): return [(k, v) for k, vl in self.dict.iteritems() for v in vl] def get(self, key, default=None, index=-1, type=None): - """Return the most recent value for a key. - - :param default: The default value to be returned if the key is not - present or the type conversion fails. - :param index: An index for the list of available values. - :param type: If defined, this callable is used to cast the value - into a specific type. Exception are suppressed and result in - the default value to be returned. + """ Return the most recent value for a key. + + :param default: The default value to be returned if the key is not + present or the type conversion fails. + :param index: An index for the list of available values. + :param type: If defined, this callable is used to cast the value + into a specific type. Exception are suppressed and result in + the default value to be returned. """ try: val = self.dict[key][index] @@ -2304,15 +2195,15 @@ def get(self, key, default=None, index=-1, type=None): return default def append(self, key, value): - """Add a new value to the list of values for this key.""" + """ Add a new value to the list of values for this key. """ self.dict.setdefault(key, []).append(value) def replace(self, key, value): - """Replace the list of values with a single value.""" + """ Replace the list of values with a single value. """ self.dict[key] = [value] def getall(self, key): - """Return a (possibly empty) list of values for a key.""" + """ Return a (possibly empty) list of values for a key. 
""" return self.dict.get(key) or [] #: Aliases for WTForms to mimic other multi-dict APIs (Django) @@ -2321,12 +2212,12 @@ def getall(self, key): class FormsDict(MultiDict): - """This :class:`MultiDict` subclass is used to store request form data. - Additionally to the normal dict-like item access methods (which return - unmodified data as native strings), this container also supports - attribute-like access to its values. Attributes are automatically de- - or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing - attributes default to an empty string.""" + """ This :class:`MultiDict` subclass is used to store request form data. + Additionally to the normal dict-like item access methods (which return + unmodified data as native strings), this container also supports + attribute-like access to its values. Attributes are automatically de- + or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing + attributes default to an empty string. """ #: Encoding used for attribute values. input_encoding = 'utf8' @@ -2343,9 +2234,9 @@ def _fix(self, s, encoding=None): return s def decode(self, encoding=None): - """Returns a copy with all keys and values de- or recoded to match - :attr:`input_encoding`. Some libraries (e.g. WTForms) want a - unicode dictionary.""" + """ Returns a copy with all keys and values de- or recoded to match + :attr:`input_encoding`. Some libraries (e.g. WTForms) want a + unicode dictionary. """ copy = FormsDict() enc = copy.input_encoding = encoding or self.input_encoding copy.recode_unicode = False @@ -2354,7 +2245,7 @@ def decode(self, encoding=None): return copy def getunicode(self, name, default=None, encoding=None): - """Return the value as a unicode string, or the default.""" + """ Return the value as a unicode string, or the default. 
""" try: return self._fix(self[name], encoding) except (UnicodeError, KeyError): @@ -2366,15 +2257,13 @@ def __getattr__(self, name, default=unicode()): return super(FormsDict, self).__getattr__(name) return self.getunicode(name, default=default) - class HeaderDict(MultiDict): - """A case-insensitive version of :class:`MultiDict` that defaults to - replace the old value instead of appending it.""" + """ A case-insensitive version of :class:`MultiDict` that defaults to + replace the old value instead of appending it. """ def __init__(self, *a, **ka): self.dict = {} - if a or ka: - self.update(*a, **ka) + if a or ka: self.update(*a, **ka) def __contains__(self, key): return _hkey(key) in self.dict @@ -2407,17 +2296,16 @@ def filter(self, names): class WSGIHeaderDict(DictMixin): - """This dict-like class wraps a WSGI environ dict and provides convenient - access to HTTP_* fields. Keys and values are native strings - (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI - environment contains non-native string values, these are de- or encoded - using a lossless 'latin1' character set. - - The API will remain stable even on changes to the relevant PEPs. - Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one - that uses non-native strings.) + """ This dict-like class wraps a WSGI environ dict and provides convenient + access to HTTP_* fields. Keys and values are native strings + (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI + environment contains non-native string values, these are de- or encoded + using a lossless 'latin1' character set. + + The API will remain stable even on changes to the relevant PEPs. + Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one + that uses non-native strings.) """ - #: List of keys that do not have a ``HTTP_`` prefix. 
cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') @@ -2425,14 +2313,14 @@ def __init__(self, environ): self.environ = environ def _ekey(self, key): - """Translate header field name to CGI/WSGI environ key.""" + """ Translate header field name to CGI/WSGI environ key. """ key = key.replace('-', '_').upper() if key in self.cgikeys: return key return 'HTTP_' + key def raw(self, key, default=None): - """Return the header value as is (may be bytes or unicode).""" + """ Return the header value as is (may be bytes or unicode). """ return self.environ.get(self._ekey(key), default) def __getitem__(self, key): @@ -2445,10 +2333,10 @@ def __getitem__(self, key): return val def __setitem__(self, key, value): - raise TypeError('%s is read-only.' % self.__class__) + raise TypeError("%s is read-only." % self.__class__) def __delitem__(self, key): - raise TypeError('%s is read-only.' % self.__class__) + raise TypeError("%s is read-only." % self.__class__) def __iter__(self): for key in self.environ: @@ -2466,16 +2354,14 @@ def __len__(self): def __contains__(self, key): return self._ekey(key) in self.environ - _UNSET = object() - class ConfigDict(dict): - """A dict-like configuration storage with additional support for - namespaces, validators, meta-data and overlays. + """ A dict-like configuration storage with additional support for + namespaces, validators, meta-data and overlays. - This dict-like class is heavily optimized for read access. - Read-only methods and item access should be as fast as a native dict. + This dict-like class is heavily optimized for read access. + Read-only methods and item access should be as fast as a native dict. """ __slots__ = ('_meta', '_change_listener', '_overlays', '_virtual_keys', '_source', '__weakref__') @@ -2493,15 +2379,16 @@ def __init__(self): def load_module(self, name, squash=True): """Load values from a Python module. - Import a python module by name and add all upper-case module-level - variables to this config dict. 
+ Import a python module by name and add all upper-case module-level + variables to this config dict. - :param name: Module name to import and load. - :param squash: If true (default), nested dicts are assumed to - represent namespaces and flattened (see :meth:`load_dict`). + :param name: Module name to import and load. + :param squash: If true (default), nested dicts are assumed to + represent namespaces and flattened (see :meth:`load_dict`). """ config_obj = load(name) - obj = {key: getattr(config_obj, key) for key in dir(config_obj) if key.isupper()} + obj = {key: getattr(config_obj, key) + for key in dir(config_obj) if key.isupper()} if squash: self.load_dict(obj) @@ -2510,24 +2397,25 @@ def load_module(self, name, squash=True): return self def load_config(self, filename, **options): - """Load values from ``*.ini`` style config files using configparser. + """ Load values from ``*.ini`` style config files using configparser. - INI style sections (e.g. ``[section]``) are used as namespace for - all keys within that section. Both section and key names may contain - dots as namespace separators and are converted to lower-case. + INI style sections (e.g. ``[section]``) are used as namespace for + all keys within that section. Both section and key names may contain + dots as namespace separators and are converted to lower-case. - The special sections ``[bottle]`` and ``[ROOT]`` refer to the root - namespace and the ``[DEFAULT]`` section defines default values for all - other sections. + The special sections ``[bottle]`` and ``[ROOT]`` refer to the root + namespace and the ``[DEFAULT]`` section defines default values for all + other sections. - :param filename: The path of a config file, or a list of paths. - :param options: All keyword parameters are passed to the underlying - :class:`python:configparser.ConfigParser` constructor call. + :param filename: The path of a config file, or a list of paths. 
+ :param options: All keyword parameters are passed to the underlying + :class:`python:configparser.ConfigParser` constructor call. """ options.setdefault('allow_no_value', True) if py3k: - options.setdefault('interpolation', configparser.ExtendedInterpolation()) + options.setdefault('interpolation', + configparser.ExtendedInterpolation()) conf = configparser.ConfigParser(**options) conf.read(filename) for section in conf.sections(): @@ -2539,12 +2427,12 @@ def load_config(self, filename, **options): return self def load_dict(self, source, namespace=''): - """Load values from a dictionary structure. Nesting can be used to - represent namespaces. + """ Load values from a dictionary structure. Nesting can be used to + represent namespaces. - >>> c = ConfigDict() - >>> c.load_dict({'some': {'namespace': {'key': 'value'} } }) - {'some.namespace.key': 'value'} + >>> c = ConfigDict() + >>> c.load_dict({'some': {'namespace': {'key': 'value'} } }) + {'some.namespace.key': 'value'} """ for key, value in source.items(): if isinstance(key, basestring): @@ -2558,11 +2446,11 @@ def load_dict(self, source, namespace=''): return self def update(self, *a, **ka): - """If the first parameter is a string, all keys are prefixed with this - namespace. Apart from that it works just as the usual dict.update(). + """ If the first parameter is a string, all keys are prefixed with this + namespace. Apart from that it works just as the usual dict.update(). 
- >>> c = ConfigDict() - >>> c.update('some.namespace', key='value') + >>> c = ConfigDict() + >>> c.update('some.namespace', key='value') """ prefix = '' if a and isinstance(a[0], basestring): @@ -2596,7 +2484,7 @@ def __delitem__(self, key): if key not in self: raise KeyError(key) if key in self._virtual_keys: - raise KeyError('Virtual keys cannot be deleted: %s' % key) + raise KeyError("Virtual keys cannot be deleted: %s" % key) if self._source and key in self._source: # Not virtual, but present in source -> Restore virtual value @@ -2609,7 +2497,7 @@ def __delitem__(self, key): overlay._delete_virtual(key) def _set_virtual(self, key, value): - """Recursively set or update virtual keys.""" + """ Recursively set or update virtual keys. """ if key in self and key not in self._virtual_keys: return # Do nothing for non-virtual keys. @@ -2621,7 +2509,7 @@ def _set_virtual(self, key, value): overlay._set_virtual(key, value) def _delete_virtual(self, key): - """Recursively delete virtual entry.""" + """ Recursively delete virtual entry. """ if key not in self._virtual_keys: return # Do nothing for non-virtual keys. @@ -2642,22 +2530,22 @@ def _add_change_listener(self, func): return func def meta_get(self, key, metafield, default=None): - """Return the value of a meta field for a key.""" + """ Return the value of a meta field for a key. """ return self._meta.get(key, {}).get(metafield, default) def meta_set(self, key, metafield, value): - """Set the meta field for a key to a new value. - - Meta-fields are shared between all members of an overlay tree. + """ Set the meta field for a key to a new value. + + Meta-fields are shared between all members of an overlay tree. """ self._meta.setdefault(key, {})[metafield] = value def meta_list(self, key): - """Return an iterable of meta field names defined for a key.""" + """ Return an iterable of meta field names defined for a key. 
""" return self._meta.get(key, {}).keys() def _define(self, key, default=_UNSET, help=_UNSET, validate=_UNSET): - """(Unstable) Shortcut for plugins to define own config parameters.""" + """ (Unstable) Shortcut for plugins to define own config parameters. """ if default is not _UNSET: self.setdefault(key, default) if help is not _UNSET: @@ -2672,28 +2560,28 @@ def _iter_overlays(self): yield overlay def _make_overlay(self): - """(Unstable) Create a new overlay that acts like a chained map: Values - missing in the overlay are copied from the source map. Both maps - share the same meta entries. - - Entries that were copied from the source are called 'virtual'. You - can not delete virtual keys, but overwrite them, which turns them - into non-virtual entries. Setting keys on an overlay never affects - its source, but may affect any number of child overlays. - - Other than collections.ChainMap or most other implementations, this - approach does not resolve missing keys on demand, but instead - actively copies all values from the source to the overlay and keeps - track of virtual and non-virtual keys internally. This removes any - lookup-overhead. Read-access is as fast as a build-in dict for both - virtual and non-virtual keys. - - Changes are propagated recursively and depth-first. A failing - on-change handler in an overlay stops the propagation of virtual - values and may result in an partly updated tree. Take extra care - here and make sure that on-change handlers never fail. - - Used by Route.config + """ (Unstable) Create a new overlay that acts like a chained map: Values + missing in the overlay are copied from the source map. Both maps + share the same meta entries. + + Entries that were copied from the source are called 'virtual'. You + can not delete virtual keys, but overwrite them, which turns them + into non-virtual entries. Setting keys on an overlay never affects + its source, but may affect any number of child overlays. 
+ + Other than collections.ChainMap or most other implementations, this + approach does not resolve missing keys on demand, but instead + actively copies all values from the source to the overlay and keeps + track of virtual and non-virtual keys internally. This removes any + lookup-overhead. Read-access is as fast as a build-in dict for both + virtual and non-virtual keys. + + Changes are propagated recursively and depth-first. A failing + on-change handler in an overlay stops the propagation of virtual + values and may result in an partly updated tree. Take extra care + here and make sure that on-change handlers never fail. + + Used by Route.config """ # Cleanup dead references self._overlays[:] = [ref for ref in self._overlays if ref() is not None] @@ -2707,20 +2595,21 @@ def _make_overlay(self): return overlay + + class AppStack(list): - """A stack-like list. Calling it returns the head of the stack.""" + """ A stack-like list. Calling it returns the head of the stack. """ def __call__(self): - """Return the current default application.""" + """ Return the current default application. 
""" return self.default def push(self, value=None): - """Add a new :class:`Bottle` instance to the stack""" + """ Add a new :class:`Bottle` instance to the stack """ if not isinstance(value, Bottle): value = Bottle() self.append(value) return value - new_app = push @property @@ -2735,8 +2624,7 @@ class WSGIFileWrapper(object): def __init__(self, fp, buffer_size=1024 * 64): self.fp, self.buffer_size = fp, buffer_size for attr in 'fileno', 'close', 'read', 'readlines', 'tell', 'seek': - if hasattr(fp, attr): - setattr(self, attr, getattr(fp, attr)) + if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr)) def __iter__(self): buff, read = self.buffer_size, self.read @@ -2747,8 +2635,8 @@ def __iter__(self): class _closeiter(object): - """This only exists to be able to attach a .close method to iterators that - do not support attribute assignment (most of itertools).""" + """ This only exists to be able to attach a .close method to iterators that + do not support attribute assignment (most of itertools). """ def __init__(self, iterator, close=None): self.iterator = iterator @@ -2763,13 +2651,13 @@ def close(self): class ResourceManager(object): - """This class manages a list of search paths and helps to find and open - application-bound resources (files). + """ This class manages a list of search paths and helps to find and open + application-bound resources (files). - :param base: default value for :meth:`add_path` calls. - :param opener: callable used to open resources. - :param cachemode: controls which lookups are cached. One of 'all', - 'found' or 'none'. + :param base: default value for :meth:`add_path` calls. + :param opener: callable used to open resources. + :param cachemode: controls which lookups are cached. One of 'all', + 'found' or 'none'. 
""" def __init__(self, base='./', opener=open, cachemode='all'): @@ -2783,21 +2671,21 @@ def __init__(self, base='./', opener=open, cachemode='all'): self.cache = {} def add_path(self, path, base=None, index=None, create=False): - """Add a new path to the list of search paths. Return False if the - path does not exist. + """ Add a new path to the list of search paths. Return False if the + path does not exist. - :param path: The new search path. Relative paths are turned into - an absolute and normalized form. If the path looks like a file - (not ending in `/`), the filename is stripped off. - :param base: Path used to absolutize relative search paths. - Defaults to :attr:`base` which defaults to ``os.getcwd()``. - :param index: Position within the list of search paths. Defaults - to last index (appends to the list). + :param path: The new search path. Relative paths are turned into + an absolute and normalized form. If the path looks like a file + (not ending in `/`), the filename is stripped off. + :param base: Path used to absolutize relative search paths. + Defaults to :attr:`base` which defaults to ``os.getcwd()``. + :param index: Position within the list of search paths. Defaults + to last index (appends to the list). - The `base` parameter makes it easy to reference files installed - along with a python module or package:: + The `base` parameter makes it easy to reference files installed + along with a python module or package:: - res.add_path('./resources/', __file__) + res.add_path('./resources/', __file__) """ base = os.path.abspath(os.path.dirname(base or self.base)) path = os.path.abspath(os.path.join(base, os.path.dirname(path))) @@ -2814,25 +2702,22 @@ def add_path(self, path, base=None, index=None, create=False): return os.path.exists(path) def __iter__(self): - """Iterate over all existing files in all registered paths.""" + """ Iterate over all existing files in all registered paths. 
""" search = self.path[:] while search: path = search.pop() - if not os.path.isdir(path): - continue + if not os.path.isdir(path): continue for name in os.listdir(path): full = os.path.join(path, name) - if os.path.isdir(full): - search.append(full) - else: - yield full + if os.path.isdir(full): search.append(full) + else: yield full def lookup(self, name): - """Search for a resource and return an absolute file path, or `None`. + """ Search for a resource and return an absolute file path, or `None`. - The :attr:`path` list is searched in order. The first match is - returned. Symlinks are followed. The result is cached to speed up - future lookups.""" + The :attr:`path` list is searched in order. The first match is + returned. Symlinks are followed. The result is cached to speed up + future lookups. """ if name not in self.cache or DEBUG: for path in self.path: fpath = os.path.join(path, name) @@ -2845,16 +2730,15 @@ def lookup(self, name): return self.cache[name] def open(self, name, mode='r', *args, **kwargs): - """Find a resource and return a file object, or raise IOError.""" + """ Find a resource and return a file object, or raise IOError. """ fname = self.lookup(name) - if not fname: - raise IOError('Resource %r not found.' % name) + if not fname: raise IOError("Resource %r not found." % name) return self.opener(fname, mode=mode, *args, **kwargs) class FileUpload(object): def __init__(self, fileobj, name, filename, headers=None): - """Wrapper for a single file uploaded via ``multipart/form-data``.""" + """ Wrapper for a single file uploaded via ``multipart/form-data``. 
""" #: Open file(-like) object (BytesIO buffer or temporary file) self.file = fileobj #: Name of the upload form field @@ -2868,18 +2752,18 @@ def __init__(self, fileobj, name, filename, headers=None): content_length = HeaderProperty('Content-Length', reader=int, default=-1) def get_header(self, name, default=None): - """Return the value of a header within the multipart part.""" + """ Return the value of a header within the multipart part. """ return self.headers.get(name, default) @cached_property def filename(self): - """Name of the file on the client file system, but normalized to ensure - file system compatibility. An empty filename is returned as 'empty'. + """ Name of the file on the client file system, but normalized to ensure + file system compatibility. An empty filename is returned as 'empty'. - Only ASCII letters, digits, dashes, underscores and dots are - allowed in the final filename. Accents are removed, if possible. - Whitespace is replaced by a single dash. Leading or tailing dots - or dashes are removed. The filename is limited to 255 characters. + Only ASCII letters, digits, dashes, underscores and dots are + allowed in the final filename. Accents are removed, if possible. + Whitespace is replaced by a single dash. Leading or tailing dots + or dashes are removed. The filename is limited to 255 characters. """ fname = self.raw_filename if not isinstance(fname, unicode): @@ -2891,23 +2775,22 @@ def filename(self): fname = re.sub(r'[-\s]+', '-', fname).strip('.-') return fname[:255] or 'empty' - def _copy_file(self, fp, chunk_size=2**16): + def _copy_file(self, fp, chunk_size=2 ** 16): read, write, offset = self.file.read, fp.write, self.file.tell() while 1: buf = read(chunk_size) - if not buf: - break + if not buf: break write(buf) self.file.seek(offset) - def save(self, destination, overwrite=False, chunk_size=2**16): - """Save file to disk or copy its content to an open file(-like) object. 
- If *destination* is a directory, :attr:`filename` is added to the - path. Existing files are not overwritten by default (IOError). + def save(self, destination, overwrite=False, chunk_size=2 ** 16): + """ Save file to disk or copy its content to an open file(-like) object. + If *destination* is a directory, :attr:`filename` is added to the + path. Existing files are not overwritten by default (IOError). - :param destination: File path, directory or file(-like) object. - :param overwrite: If True, replace existing files. (default: False) - :param chunk_size: Bytes to read at a time. (default: 64kb) + :param destination: File path, directory or file(-like) object. + :param overwrite: If True, replace existing files. (default: False) + :param chunk_size: Bytes to read at a time. (default: 64kb) """ if isinstance(destination, basestring): # Except file-likes here if os.path.isdir(destination): @@ -2919,31 +2802,30 @@ def save(self, destination, overwrite=False, chunk_size=2**16): else: self._copy_file(destination, chunk_size) - ############################################################################### # Application Helper ########################################################### ############################################################################### def abort(code=500, text='Unknown Error.'): - """Aborts execution and causes a HTTP error.""" + """ Aborts execution and causes a HTTP error. """ raise HTTPError(code, text) def redirect(url, code=None): - """Aborts execution and causes a 303 or 302 redirect, depending on - the HTTP protocol version.""" + """ Aborts execution and causes a 303 or 302 redirect, depending on + the HTTP protocol version. 
""" if not code: - code = 303 if request.get('SERVER_PROTOCOL') == 'HTTP/1.1' else 302 + code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302 res = response.copy(cls=HTTPResponse) res.status = code - res.body = '' + res.body = "" res.set_header('Location', urljoin(request.url, url)) raise res def _rangeiter(fp, offset, limit, bufsize=1024 * 1024): - """Yield chunks from a range in a file.""" + """ Yield chunks from a range in a file. """ fp.seek(offset) while limit > 0: part = fp.read(min(limit, bufsize)) @@ -2953,36 +2835,41 @@ def _rangeiter(fp, offset, limit, bufsize=1024 * 1024): yield part -def static_file(filename, root, mimetype=True, download=False, charset='UTF-8', etag=None, headers=None): - """Open a file in a safe way and return an instance of :exc:`HTTPResponse` - that can be sent back to the client. - - :param filename: Name or path of the file to send, relative to ``root``. - :param root: Root path for file lookups. Should be an absolute directory - path. - :param mimetype: Provide the content-type header (default: guess from - file extension) - :param download: If True, ask the browser to open a `Save as...` dialog - instead of opening the file with the associated program. You can - specify a custom filename as a string. If not specified, the - original filename is used (default: False). - :param charset: The charset for files with a ``text/*`` mime-type. - (default: UTF-8) - :param etag: Provide a pre-computed ETag header. If set to ``False``, - ETag handling is disabled. (default: auto-generate ETag header) - :param headers: Additional headers dict to add to the response. - - While checking user input is always a good idea, this function provides - additional protection against malicious ``filename`` parameters from - breaking out of the ``root`` directory and leaking sensitive information - to an attacker. - - Read-protected files or files outside of the ``root`` directory are - answered with ``403 Access Denied``. 
Missing files result in a - ``404 Not Found`` response. Conditional requests (``If-Modified-Since``, - ``If-None-Match``) are answered with ``304 Not Modified`` whenever - possible. ``HEAD`` and ``Range`` requests (used by download managers to - check or continue partial downloads) are also handled automatically. +def static_file(filename, root, + mimetype=True, + download=False, + charset='UTF-8', + etag=None, + headers=None): + """ Open a file in a safe way and return an instance of :exc:`HTTPResponse` + that can be sent back to the client. + + :param filename: Name or path of the file to send, relative to ``root``. + :param root: Root path for file lookups. Should be an absolute directory + path. + :param mimetype: Provide the content-type header (default: guess from + file extension) + :param download: If True, ask the browser to open a `Save as...` dialog + instead of opening the file with the associated program. You can + specify a custom filename as a string. If not specified, the + original filename is used (default: False). + :param charset: The charset for files with a ``text/*`` mime-type. + (default: UTF-8) + :param etag: Provide a pre-computed ETag header. If set to ``False``, + ETag handling is disabled. (default: auto-generate ETag header) + :param headers: Additional headers dict to add to the response. + + While checking user input is always a good idea, this function provides + additional protection against malicious ``filename`` parameters from + breaking out of the ``root`` directory and leaking sensitive information + to an attacker. + + Read-protected files or files outside of the ``root`` directory are + answered with ``403 Access Denied``. Missing files result in a + ``404 Not Found`` response. Conditional requests (``If-Modified-Since``, + ``If-None-Match``) are answered with ``304 Not Modified`` whenever + possible. 
``HEAD`` and ``Range`` requests (used by download managers to + check or continue partial downloads) are also handled automatically. """ root = os.path.join(os.path.abspath(root), '') @@ -2991,26 +2878,22 @@ def static_file(filename, root, mimetype=True, download=False, charset='UTF-8', getenv = request.environ.get if not filename.startswith(root): - return HTTPError(403, 'Access denied.') + return HTTPError(403, "Access denied.") if not os.path.exists(filename) or not os.path.isfile(filename): - return HTTPError(404, 'File does not exist.') + return HTTPError(404, "File does not exist.") if not os.access(filename, os.R_OK): - return HTTPError(403, 'You do not have permission to access this file.') + return HTTPError(403, "You do not have permission to access this file.") if mimetype is True: name = download if isinstance(download, str) else filename mimetype, encoding = mimetypes.guess_type(name) if encoding == 'gzip': mimetype = 'application/gzip' - elif encoding: # e.g. bzip2 -> application/x-bzip2 + elif encoding: # e.g. 
bzip2 -> application/x-bzip2 mimetype = 'application/x-' + encoding - if ( - charset - and mimetype - and 'charset=' not in mimetype - and (mimetype[:5] == 'text/' or mimetype == 'application/javascript') - ): + if charset and mimetype and 'charset=' not in mimetype \ + and (mimetype[:5] == 'text/' or mimetype == 'application/javascript'): mimetype += '; charset=%s' % charset if mimetype: @@ -3020,7 +2903,7 @@ def static_file(filename, root, mimetype=True, download=False, charset='UTF-8', download = os.path.basename(filename) if download: - download = download.replace('"', '') + download = download.replace('"','') headers['Content-Disposition'] = 'attachment; filename="%s"' % download stats = os.stat(filename) @@ -3029,7 +2912,8 @@ def static_file(filename, root, mimetype=True, download=False, charset='UTF-8', headers['Date'] = email.utils.formatdate(time.time(), usegmt=True) if etag is None: - etag = '%d:%d:%d:%d:%s' % (stats.st_dev, stats.st_ino, stats.st_mtime, clen, filename) + etag = '%d:%d:%d:%d:%s' % (stats.st_dev, stats.st_ino, stats.st_mtime, + clen, filename) etag = hashlib.sha1(tob(etag)).hexdigest() if etag: @@ -3040,39 +2924,36 @@ def static_file(filename, root, mimetype=True, download=False, charset='UTF-8', ims = getenv('HTTP_IF_MODIFIED_SINCE') if ims: - ims = parse_date(ims.split(';')[0].strip()) + ims = parse_date(ims.split(";")[0].strip()) if ims is not None and ims >= int(stats.st_mtime): return HTTPResponse(status=304, **headers) body = '' if request.method == 'HEAD' else open(filename, 'rb') - headers['Accept-Ranges'] = 'bytes' + headers["Accept-Ranges"] = "bytes" range_header = getenv('HTTP_RANGE') if range_header: ranges = list(parse_range_header(range_header, clen)) if not ranges: - return HTTPError(416, 'Requested Range Not Satisfiable') + return HTTPError(416, "Requested Range Not Satisfiable") offset, end = ranges[0] rlen = end - offset - headers['Content-Range'] = 'bytes %d-%d/%d' % (offset, end - 1, clen) - headers['Content-Length'] = 
str(rlen) - if body: - body = _closeiter(_rangeiter(body, offset, rlen), body.close) + headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end - 1, clen) + headers["Content-Length"] = str(rlen) + if body: body = _closeiter(_rangeiter(body, offset, rlen), body.close) return HTTPResponse(body, status=206, **headers) return HTTPResponse(body, **headers) - ############################################################################### # HTTP Utilities and MISC (TODO) ############################################### ############################################################################### def debug(mode=True): - """Change the debug level. + """ Change the debug level. There is only one debug level supported at the moment.""" global DEBUG - if mode: - warnings.simplefilter('default') + if mode: warnings.simplefilter('default') DEBUG = bool(mode) @@ -3093,16 +2974,16 @@ def http_date(value): def parse_date(ims): - """Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch.""" + """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """ try: ts = email.utils.parsedate_tz(ims) - return calendar.timegm(ts[:8] + (0,)) - (ts[9] or 0) + return calendar.timegm(ts[:8] + (0, )) - (ts[9] or 0) except (TypeError, ValueError, IndexError, OverflowError): return None def parse_auth(header): - """Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" + """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" try: method, data = header.split(None, 1) if method.lower() == 'basic': @@ -3113,10 +2994,9 @@ def parse_auth(header): def parse_range_header(header, maxlen=0): - """Yield (start, end) ranges parsed from a HTTP Range header. Skip - unsatisfiable ranges. The end index is non-inclusive.""" - if not header or header[:6] != 'bytes=': - return + """ Yield (start, end) ranges parsed from a HTTP Range header. Skip + unsatisfiable ranges. 
The end index is non-inclusive.""" + if not header or header[:6] != 'bytes=': return ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r] for start, end in ranges: try: @@ -3135,9 +3015,8 @@ def parse_range_header(header, maxlen=0): #: Header tokenizer used by _parse_http_header() _hsplit = re.compile('(?:(?:"((?:[^"\\\\]|\\\\.)*)")|([^;,=]+))([;,=]?)').findall - def _parse_http_header(h): - """Parses a typical multi-valued and parametrised HTTP header (e.g. Accept headers) and returns a list of values + """ Parses a typical multi-valued and parametrised HTTP header (e.g. Accept headers) and returns a list of values and parameters. For non-standard or broken input, this implementation may return partial results. :param h: A header string (e.g. ``text/html,text/plain;q=0.9,*/*;q=0.8``) :return: List of (value, params) tuples. The second element is a (possibly empty) dict. @@ -3172,11 +3051,9 @@ def _parse_http_header(h): def _parse_qsl(qs): r = [] for pair in qs.split('&'): - if not pair: - continue + if not pair: continue nv = pair.split('=', 1) - if len(nv) != 2: - nv.append('') + if len(nv) != 2: nv.append('') key = urlunquote(nv[0].replace('+', ' ')) value = urlunquote(nv[1].replace('+', ' ')) r.append((key, value)) @@ -3184,14 +3061,16 @@ def _parse_qsl(qs): def _lscmp(a, b): - """Compares two strings in a cryptographically safe way: - Runtime is not affected by length of common prefix.""" - return not sum(0 if x == y else 1 for x, y in zip(a, b)) and len(a) == len(b) + """ Compares two strings in a cryptographically safe way: + Runtime is not affected by length of common prefix. """ + return not sum(0 if x == y else 1 + for x, y in zip(a, b)) and len(a) == len(b) def cookie_encode(data, key, digestmod=None): - """Encode and sign a pickle-able object. Return a (byte) string""" - depr(0, 13, 'cookie_encode() will be removed soon.', 'Do not use this API directly.') + """ Encode and sign a pickle-able object. 
Return a (byte) string """ + depr(0, 13, "cookie_encode() will be removed soon.", + "Do not use this API directly.") digestmod = digestmod or hashlib.sha256 msg = base64.b64encode(pickle.dumps(data, -1)) sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=digestmod).digest()) @@ -3199,8 +3078,9 @@ def cookie_encode(data, key, digestmod=None): def cookie_decode(data, key, digestmod=None): - """Verify and decode an encoded string. Return an object or None.""" - depr(0, 13, 'cookie_decode() will be removed soon.', 'Do not use this API directly.') + """ Verify and decode an encoded string. Return an object or None.""" + depr(0, 13, "cookie_decode() will be removed soon.", + "Do not use this API directly.") data = tob(data) if cookie_is_encoded(data): sig, msg = data.split(tob('?'), 1) @@ -3212,29 +3092,26 @@ def cookie_decode(data, key, digestmod=None): def cookie_is_encoded(data): - """Return True if the argument looks like a encoded cookie.""" - depr(0, 13, 'cookie_is_encoded() will be removed soon.', 'Do not use this API directly.') + """ Return True if the argument looks like a encoded cookie.""" + depr(0, 13, "cookie_is_encoded() will be removed soon.", + "Do not use this API directly.") return bool(data.startswith(tob('!')) and tob('?') in data) def html_escape(string): - """Escape HTML special characters ``&<>`` and quotes ``'"``.""" - return ( - string.replace('&', '&') - .replace('<', '<') - .replace('>', '>') - .replace('"', '"') - .replace("'", ''') - ) + """ Escape HTML special characters ``&<>`` and quotes ``'"``. 
""" + return string.replace('&', '&').replace('<', '<').replace('>', '>')\ + .replace('"', '"').replace("'", ''') def html_quote(string): - """Escape and quote a string to be used as an HTTP attribute.""" - return '"%s"' % html_escape(string).replace('\n', ' ').replace('\r', ' ').replace('\t', ' ') + """ Escape and quote a string to be used as an HTTP attribute.""" + return '"%s"' % html_escape(string).replace('\n', ' ')\ + .replace('\r', ' ').replace('\t', ' ') def yieldroutes(func): - """Return a generator for routes that match the signature (name, args) + """ Return a generator for routes that match the signature (name, args) of the func parameter. This may yield more than one route if the function takes optional keyword arguments. The output is best described by example:: @@ -3254,22 +3131,19 @@ def yieldroutes(func): def path_shift(script_name, path_info, shift=1): - """Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. + """ Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. - :return: The modified paths. - :param script_name: The SCRIPT_NAME path. - :param script_name: The PATH_INFO path. - :param shift: The number of path fragments to shift. May be negative to - change the shift direction. (default: 1) + :return: The modified paths. + :param script_name: The SCRIPT_NAME path. + :param script_name: The PATH_INFO path. + :param shift: The number of path fragments to shift. May be negative to + change the shift direction. 
(default: 1) """ - if shift == 0: - return script_name, path_info + if shift == 0: return script_name, path_info pathlist = path_info.strip('/').split('/') scriptlist = script_name.strip('/').split('/') - if pathlist and pathlist[0] == '': - pathlist = [] - if scriptlist and scriptlist[0] == '': - scriptlist = [] + if pathlist and pathlist[0] == '': pathlist = [] + if scriptlist and scriptlist[0] == '': scriptlist = [] if 0 < shift <= len(pathlist): moved = pathlist[:shift] scriptlist = scriptlist + moved @@ -3280,19 +3154,19 @@ def path_shift(script_name, path_info, shift=1): scriptlist = scriptlist[:shift] else: empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO' - raise AssertionError('Cannot shift. Nothing left from %s' % empty) + raise AssertionError("Cannot shift. Nothing left from %s" % empty) new_script_name = '/' + '/'.join(scriptlist) new_path_info = '/' + '/'.join(pathlist) - if path_info.endswith('/') and pathlist: - new_path_info += '/' + if path_info.endswith('/') and pathlist: new_path_info += '/' return new_script_name, new_path_info -def auth_basic(check, realm='private', text='Access denied'): - """Callback decorator to require HTTP auth (basic). - TODO: Add route(check_auth=...) parameter.""" +def auth_basic(check, realm="private", text="Access denied"): + """ Callback decorator to require HTTP auth (basic). + TODO: Add route(check_auth=...) parameter. """ def decorator(func): + @functools.wraps(func) def wrapper(*a, **ka): user, password = request.auth or (None, None) @@ -3306,13 +3180,12 @@ def wrapper(*a, **ka): return decorator - # Shortcuts for common Bottle methods. # They all refer to the current default application. def make_default_app_wrapper(name): - """Return a callable that relays calls to the current default app.""" + """ Return a callable that relays calls to the current default app. 
""" @functools.wraps(getattr(Bottle, name)) def wrapper(*a, **ka): @@ -3321,18 +3194,18 @@ def wrapper(*a, **ka): return wrapper -route = make_default_app_wrapper('route') -get = make_default_app_wrapper('get') -post = make_default_app_wrapper('post') -put = make_default_app_wrapper('put') -delete = make_default_app_wrapper('delete') -patch = make_default_app_wrapper('patch') -error = make_default_app_wrapper('error') -mount = make_default_app_wrapper('mount') -hook = make_default_app_wrapper('hook') -install = make_default_app_wrapper('install') +route = make_default_app_wrapper('route') +get = make_default_app_wrapper('get') +post = make_default_app_wrapper('post') +put = make_default_app_wrapper('put') +delete = make_default_app_wrapper('delete') +patch = make_default_app_wrapper('patch') +error = make_default_app_wrapper('error') +mount = make_default_app_wrapper('mount') +hook = make_default_app_wrapper('hook') +install = make_default_app_wrapper('install') uninstall = make_default_app_wrapper('uninstall') -url = make_default_app_wrapper('get_url') +url = make_default_app_wrapper('get_url') ############################################################################### @@ -3344,7 +3217,7 @@ def wrapper(*a, **ka): class MultipartError(HTTPError): def __init__(self, msg): - HTTPError.__init__(self, 400, 'MultipartError: ' + msg) + HTTPError.__init__(self, 400, "MultipartError: " + msg) class _MultipartParser(object): @@ -3353,11 +3226,11 @@ def __init__( stream, boundary, content_length=-1, - disk_limit=2**30, - mem_limit=2**20, - memfile_limit=2**18, - buffer_size=2**16, - charset='latin1', + disk_limit=2 ** 30, + mem_limit=2 ** 20, + memfile_limit=2 ** 18, + buffer_size=2 ** 16, + charset="latin1", ): self.stream = stream self.boundary = boundary @@ -3369,22 +3242,22 @@ def __init__( self.charset = charset if not boundary: - raise MultipartError('No boundary.') + raise MultipartError("No boundary.") if self.buffer_size - 6 < len(boundary): # "--boundary--\r\n" 
- raise MultipartError('Boundary does not fit into buffer_size.') + raise MultipartError("Boundary does not fit into buffer_size.") def _lineiter(self): - """Iterate over a binary file-like object (crlf terminated) line by - line. Each line is returned as a (line, crlf) tuple. Lines larger - than buffer_size are split into chunks where all but the last chunk - has an empty string instead of crlf. Maximum chunk size is twice the - buffer size. + """ Iterate over a binary file-like object (crlf terminated) line by + line. Each line is returned as a (line, crlf) tuple. Lines larger + than buffer_size are split into chunks where all but the last chunk + has an empty string instead of crlf. Maximum chunk size is twice the + buffer size. """ read = self.stream.read maxread, maxbuf = self.content_length, self.buffer_size - partial = b'' # Contains the last (partial) line + partial = b"" # Contains the last (partial) line while True: chunk = read(maxbuf if maxread < 0 else min(maxbuf, maxread)) @@ -3403,20 +3276,20 @@ def _lineiter(self): if i >= 0: yield chunk[scanpos:i], b'\r\n' scanpos = i + 2 - else: # CRLF not found + else: # CRLF not found partial = chunk[scanpos:] if scanpos else chunk break if len(partial) > maxbuf: - yield partial[:-1], b'' + yield partial[:-1], b"" partial = partial[-1:] def parse(self): - """Return a MultiPart iterator. Can only be called once.""" + """ Return a MultiPart iterator. Can only be called once. 
""" - lines, line = self._lineiter(), '' - separator = b'--' + tob(self.boundary) - terminator = separator + b'--' + lines, line = self._lineiter(), "" + separator = b"--" + tob(self.boundary) + terminator = separator + b"--" mem_used, disk_used = 0, 0 # Track used resources to prevent DoS is_tail = False # True if the last line was incomplete (cutted) @@ -3426,18 +3299,18 @@ def parse(self): if line in (separator, terminator): break else: - raise MultipartError('Stream does not contain boundary') + raise MultipartError("Stream does not contain boundary") # First line is termainating boundary -> empty multipart stream if line == terminator: for _ in lines: - raise MultipartError('Found data after empty multipart stream') + raise MultipartError("Found data after empty multipart stream") return part_options = { - 'buffer_size': self.buffer_size, - 'memfile_limit': self.memfile_limit, - 'charset': self.charset, + "buffer_size": self.buffer_size, + "memfile_limit": self.memfile_limit, + "charset": self.charset, } part = _MultipartPart(**part_options) @@ -3458,9 +3331,9 @@ def parse(self): part.feed(line, nl) if part.is_buffered(): if part.size + mem_used > self.mem_limit: - raise MultipartError('Memory limit reached.') + raise MultipartError("Memory limit reached.") elif part.size + disk_used > self.disk_limit: - raise MultipartError('Disk limit reached.') + raise MultipartError("Disk limit reached.") except MultipartError: part.close() raise @@ -3468,16 +3341,16 @@ def parse(self): part.close() if line != terminator: - raise MultipartError('Unexpected end of multipart stream.') + raise MultipartError("Unexpected end of multipart stream.") class _MultipartPart(object): - def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'): + def __init__(self, buffer_size=2 ** 16, memfile_limit=2 ** 18, charset="latin1"): self.headerlist = [] self.headers = None self.file = False self.size = 0 - self._buf = b'' + self._buf = b"" self.disposition = None 
self.name = None self.filename = None @@ -3486,7 +3359,7 @@ def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'): self.memfile_limit = memfile_limit self.buffer_size = buffer_size - def feed(self, line, nl=''): + def feed(self, line, nl=""): if self.file: return self.write_body(line, nl) return self.write_header(line, nl) @@ -3495,18 +3368,18 @@ def write_header(self, line, nl): line = line.decode(self.charset) if not nl: - raise MultipartError('Unexpected end of line in header.') + raise MultipartError("Unexpected end of line in header.") if not line.strip(): # blank line -> end of header segment self.finish_header() - elif line[0] in ' \t' and self.headerlist: + elif line[0] in " \t" and self.headerlist: name, value = self.headerlist.pop() self.headerlist.append((name, value + line.strip())) else: - if ':' not in line: - raise MultipartError('Syntax error in header: No colon.') + if ":" not in line: + raise MultipartError("Syntax error in header: No colon.") - name, value = line.split(':', 1) + name, value = line.split(":", 1) self.headerlist.append((name.strip(), value.strip())) def write_body(self, line, nl): @@ -3518,10 +3391,10 @@ def write_body(self, line, nl): self._buf = nl if self.content_length > 0 and self.size > self.content_length: - raise MultipartError('Size of body exceeds Content-Length header.') + raise MultipartError("Size of body exceeds Content-Length header.") if self.size > self.memfile_limit and isinstance(self.file, BytesIO): - self.file, old = NamedTemporaryFile(mode='w+b'), self.file + self.file, old = NamedTemporaryFile(mode="w+b"), self.file old.seek(0) copied, maxcopy, chunksize = 0, self.size, self.buffer_size @@ -3534,42 +3407,42 @@ def write_body(self, line, nl): def finish_header(self): self.file = BytesIO() self.headers = HeaderDict(self.headerlist) - content_disposition = self.headers.get('Content-Disposition') - content_type = self.headers.get('Content-Type') + content_disposition = 
self.headers.get("Content-Disposition") + content_type = self.headers.get("Content-Type") if not content_disposition: - raise MultipartError('Content-Disposition header is missing.') + raise MultipartError("Content-Disposition header is missing.") self.disposition, self.options = _parse_http_header(content_disposition)[0] - self.name = self.options.get('name') - if 'filename' in self.options: - self.filename = self.options.get('filename') - if self.filename[1:3] == ':\\' or self.filename[:2] == '\\\\': - self.filename = self.filename.split('\\')[-1] # ie6 bug + self.name = self.options.get("name") + if "filename" in self.options: + self.filename = self.options.get("filename") + if self.filename[1:3] == ":\\" or self.filename[:2] == "\\\\": + self.filename = self.filename.split("\\")[-1] # ie6 bug self.content_type, options = _parse_http_header(content_type)[0] if content_type else (None, {}) - self.charset = options.get('charset') or self.charset + self.charset = options.get("charset") or self.charset - self.content_length = int(self.headers.get('Content-Length', '-1')) + self.content_length = int(self.headers.get("Content-Length", "-1")) def finish(self): if not self.file: - raise MultipartError('Incomplete part: Header section not closed.') + raise MultipartError("Incomplete part: Header section not closed.") self.file.seek(0) def is_buffered(self): - """Return true if the data is fully buffered in memory.""" + """ Return true if the data is fully buffered in memory.""" return isinstance(self.file, BytesIO) @property def value(self): - """Data decoded with the specified charset""" + """ Data decoded with the specified charset """ return self.raw.decode(self.charset) @property def raw(self): - """Data without decoding""" + """ Data without decoding """ pos = self.file.tell() self.file.seek(0) @@ -3583,7 +3456,6 @@ def close(self): self.file.close() self.file = False - ############################################################################### # Server Adapter 
############################################################### ############################################################################### @@ -3592,7 +3464,6 @@ def close(self): # - https://github.com/bottlepy/bottle/pull/647#issuecomment-60152870 # - https://github.com/bottlepy/bottle/pull/865#issuecomment-242795341 - class ServerAdapter(object): quiet = False @@ -3605,8 +3476,9 @@ def run(self, handler): # pragma: no cover pass def __repr__(self): - args = ', '.join('%s=%s' % (k, repr(v)) for k, v in self.options.items()) - return '%s(%s)' % (self.__class__.__name__, args) + args = ', '.join('%s=%s' % (k, repr(v)) + for k, v in self.options.items()) + return "%s(%s)" % (self.__class__.__name__, args) class CGIServer(ServerAdapter): @@ -3625,15 +3497,15 @@ def fixed_environ(environ, start_response): class FlupFCGIServer(ServerAdapter): def run(self, handler): # pragma: no cover import flup.server.fcgi - self.options.setdefault('bindAddress', (self.host, self.port)) flup.server.fcgi.WSGIServer(handler, **self.options).run() class WSGIRefServer(ServerAdapter): def run(self, app): # pragma: no cover + from wsgiref.simple_server import make_server + from wsgiref.simple_server import WSGIRequestHandler, WSGIServer import socket - from wsgiref.simple_server import WSGIRequestHandler, WSGIServer, make_server class FixedHandler(WSGIRequestHandler): def address_string(self): # Prevent reverse DNS lookups please. 
@@ -3652,7 +3524,8 @@ def log_request(*args, **kw): class server_cls(server_cls): address_family = socket.AF_INET6 - self.srv = make_server(self.host, self.port, app, server_cls, handler_cls) + self.srv = make_server(self.host, self.port, app, server_cls, + handler_cls) self.port = self.srv.server_port # update port actual port (0 means random) try: self.srv.serve_forever() @@ -3663,13 +3536,10 @@ class server_cls(server_cls): class CherryPyServer(ServerAdapter): def run(self, handler): # pragma: no cover - depr( - 0, - 13, - "The wsgi server part of cherrypy was split into a new project called 'cheroot'.", - "Use the 'cheroot' server adapter instead of cherrypy.", - ) - from cherrypy import wsgiserver # This will fail for CherryPy >= 9 + depr(0, 13, "The wsgi server part of cherrypy was split into a new " + "project called 'cheroot'.", "Use the 'cheroot' server " + "adapter instead of cherrypy.") + from cherrypy import wsgiserver # This will fail for CherryPy >= 9 self.options['bind_addr'] = (self.host, self.port) self.options['wsgi_app'] = handler @@ -3694,10 +3564,9 @@ def run(self, handler): # pragma: no cover class CherootServer(ServerAdapter): - def run(self, handler): # pragma: no cover + def run(self, handler): # pragma: no cover from cheroot import wsgi from cheroot.ssl import builtin - self.options['bind_addr'] = (self.host, self.port) self.options['wsgi_app'] = handler certfile = self.options.pop('certfile', None) @@ -3705,7 +3574,8 @@ def run(self, handler): # pragma: no cover chainfile = self.options.pop('chainfile', None) server = wsgi.Server(**self.options) if certfile and keyfile: - server.ssl_adapter = builtin.BuiltinSSLAdapter(certfile, keyfile, chainfile) + server.ssl_adapter = builtin.BuiltinSSLAdapter( + certfile, keyfile, chainfile) try: server.start() finally: @@ -3715,7 +3585,6 @@ def run(self, handler): # pragma: no cover class WaitressServer(ServerAdapter): def run(self, handler): from waitress import serve - serve(handler, host=self.host, 
port=self.port, _quiet=self.quiet, **self.options) @@ -3723,27 +3592,26 @@ class PasteServer(ServerAdapter): def run(self, handler): # pragma: no cover from paste import httpserver from paste.translogger import TransLogger - handler = TransLogger(handler, setup_console_handler=(not self.quiet)) - httpserver.serve(handler, host=self.host, port=str(self.port), **self.options) + httpserver.serve(handler, + host=self.host, + port=str(self.port), **self.options) class MeinheldServer(ServerAdapter): def run(self, handler): from meinheld import server - server.listen((self.host, self.port)) server.run(handler) class FapwsServer(ServerAdapter): - """Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3""" + """ Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3 """ def run(self, handler): # pragma: no cover - depr(0, 13, 'fapws3 is not maintained and support will be dropped.') + depr(0, 13, "fapws3 is not maintained and support will be dropped.") import fapws._evwsgi as evwsgi from fapws import base, config - port = self.port if float(config.SERVER_IDENT[-2:]) > 0.4: # fapws3 silently changed its API in 0.5 @@ -3751,8 +3619,8 @@ def run(self, handler): # pragma: no cover evwsgi.start(self.host, port) # fapws3 never releases the GIL. Complain upstream. I tried. No luck. if 'BOTTLE_CHILD' in os.environ and not self.quiet: - _stderr('WARNING: Auto-reloading does not work with Fapws3.') - _stderr(' (Fapws3 breaks python thread support)') + _stderr("WARNING: Auto-reloading does not work with Fapws3.") + _stderr(" (Fapws3 breaks python thread support)") evwsgi.set_base_module(base) def app(environ, start_response): @@ -3764,13 +3632,10 @@ def app(environ, start_response): class TornadoServer(ServerAdapter): - """The super hyped asynchronous server by facebook. Untested.""" + """ The super hyped asynchronous server by facebook. Untested. 
""" def run(self, handler): # pragma: no cover - import tornado.httpserver - import tornado.ioloop - import tornado.wsgi - + import tornado.wsgi, tornado.httpserver, tornado.ioloop container = tornado.wsgi.WSGIContainer(handler) server = tornado.httpserver.HTTPServer(container) server.listen(port=self.port, address=self.host) @@ -3778,14 +3643,13 @@ def run(self, handler): # pragma: no cover class AppEngineServer(ServerAdapter): - """Adapter for Google App Engine.""" - + """ Adapter for Google App Engine. """ quiet = True def run(self, handler): - depr(0, 13, 'AppEngineServer no longer required', 'Configure your application directly in your app.yaml') + depr(0, 13, "AppEngineServer no longer required", + "Configure your application directly in your app.yaml") from google.appengine.ext.webapp import util - # A main() function in the handler script enables 'App Caching'. # Lets makes sure it is there. This _really_ improves performance. module = sys.modules.get('__main__') @@ -3795,13 +3659,12 @@ def run(self, handler): class TwistedServer(ServerAdapter): - """Untested.""" + """ Untested. """ def run(self, handler): - from twisted.internet import reactor - from twisted.python.threadpool import ThreadPool from twisted.web import server, wsgi - + from twisted.python.threadpool import ThreadPool + from twisted.internet import reactor thread_pool = ThreadPool() thread_pool.start() reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop) @@ -3812,27 +3675,25 @@ def run(self, handler): class DieselServer(ServerAdapter): - """Untested.""" + """ Untested. """ def run(self, handler): - depr(0, 13, 'Diesel is not tested or supported and will be removed.') + depr(0, 13, "Diesel is not tested or supported and will be removed.") from diesel.protocols.wsgi import WSGIApplication - app = WSGIApplication(handler, port=self.port) app.run() class GeventServer(ServerAdapter): - """Untested. Options: + """ Untested. 
Options: - * See gevent.wsgi.WSGIServer() documentation for more options. + * See gevent.wsgi.WSGIServer() documentation for more options. """ def run(self, handler): - from gevent import local, pywsgi - + from gevent import pywsgi, local if not isinstance(threading.local(), local.local): - msg = 'Bottle requires gevent.monkey.patch_all() (before import)' + msg = "Bottle requires gevent.monkey.patch_all() (before import)" raise RuntimeError(msg) if self.quiet: self.options['log'] = None @@ -3840,21 +3701,20 @@ def run(self, handler): server = pywsgi.WSGIServer(address, handler, **self.options) if 'BOTTLE_CHILD' in os.environ: import signal - signal.signal(signal.SIGINT, lambda s, f: server.stop()) server.serve_forever() class GunicornServer(ServerAdapter): - """Untested. See http://gunicorn.org/configure.html for options.""" + """ Untested. See http://gunicorn.org/configure.html for options. """ def run(self, handler): from gunicorn.app.base import BaseApplication - if self.host.startswith('unix:'): + if self.host.startswith("unix:"): config = {'bind': self.host} else: - config = {'bind': '%s:%d' % (self.host, self.port)} + config = {'bind': "%s:%d" % (self.host, self.port)} config.update(self.options) @@ -3870,20 +3730,19 @@ def load(self): class EventletServer(ServerAdapter): - """Untested. Options: + """ Untested. Options: - * `backlog` adjust the eventlet backlog parameter which is the maximum - number of queued connections. Should be at least 1; the maximum - value is system-dependent. - * `family`: (default is 2) socket family, optional. See socket - documentation for available families. + * `backlog` adjust the eventlet backlog parameter which is the maximum + number of queued connections. Should be at least 1; the maximum + value is system-dependent. + * `family`: (default is 2) socket family, optional. See socket + documentation for available families. 
""" def run(self, handler): - from eventlet import listen, patcher, wsgi - + from eventlet import wsgi, listen, patcher if not patcher.is_monkey_patched(os): - msg = 'Bottle requires eventlet.monkey_patch() (before import)' + msg = "Bottle requires eventlet.monkey_patch() (before import)" raise RuntimeError(msg) socket_args = {} for arg in ('backlog', 'family'): @@ -3893,50 +3752,43 @@ def run(self, handler): pass address = (self.host, self.port) try: - wsgi.server(listen(address, **socket_args), handler, log_output=(not self.quiet)) + wsgi.server(listen(address, **socket_args), handler, + log_output=(not self.quiet)) except TypeError: # Fallback, if we have old version of eventlet wsgi.server(listen(address), handler) class BjoernServer(ServerAdapter): - """Fast server written in C: https://github.com/jonashaag/bjoern""" + """ Fast server written in C: https://github.com/jonashaag/bjoern """ def run(self, handler): from bjoern import run - run(handler, self.host, self.port, reuse_port=True) - class AsyncioServerAdapter(ServerAdapter): - """Extend ServerAdapter for adding custom event loop""" - + """ Extend ServerAdapter for adding custom event loop """ def get_event_loop(self): pass - class AiohttpServer(AsyncioServerAdapter): - """Asynchronous HTTP client/server framework for asyncio - https://pypi.python.org/pypi/aiohttp/ - https://pypi.org/project/aiohttp-wsgi/ + """ Asynchronous HTTP client/server framework for asyncio + https://pypi.python.org/pypi/aiohttp/ + https://pypi.org/project/aiohttp-wsgi/ """ def get_event_loop(self): import asyncio - return asyncio.new_event_loop() def run(self, handler): import asyncio - from aiohttp_wsgi.wsgi import serve - self.loop = self.get_event_loop() asyncio.set_event_loop(self.loop) if 'BOTTLE_CHILD' in os.environ: import signal - signal.signal(signal.SIGINT, lambda s, f: self.loop.stop()) serve(handler, host=self.host, port=self.port) @@ -3944,19 +3796,16 @@ def run(self, handler): class 
AiohttpUVLoopServer(AiohttpServer): """uvloop - https://github.com/MagicStack/uvloop + https://github.com/MagicStack/uvloop """ - def get_event_loop(self): import uvloop - return uvloop.new_event_loop() - class AutoServer(ServerAdapter): - """Untested.""" - - adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, CherootServer, WSGIRefServer] + """ Untested. """ + adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, + CherootServer, WSGIRefServer] def run(self, handler): for sa in self.adapters: @@ -3995,32 +3844,29 @@ def run(self, handler): def load(target, **namespace): - """Import a module or fetch an object from a module. + """ Import a module or fetch an object from a module. - * ``package.module`` returns `module` as a module object. - * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. - * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. + * ``package.module`` returns `module` as a module object. + * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. + * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. - The last form accepts not only function calls, but any type of - expression. Keyword arguments passed to this function are available as - local variables. Example: ``import_string('re:compile(x)', x='[a-z]')`` + The last form accepts not only function calls, but any type of + expression. Keyword arguments passed to this function are available as + local variables. 
Example: ``import_string('re:compile(x)', x='[a-z]')`` """ - module, target = target.split(':', 1) if ':' in target else (target, None) - if module not in sys.modules: - __import__(module) - if not target: - return sys.modules[module] - if target.isalnum(): - return getattr(sys.modules[module], target) + module, target = target.split(":", 1) if ':' in target else (target, None) + if module not in sys.modules: __import__(module) + if not target: return sys.modules[module] + if target.isalnum(): return getattr(sys.modules[module], target) package_name = module.split('.')[0] namespace[package_name] = sys.modules[package_name] return eval('%s.%s' % (module, target), namespace) def load_app(target): - """Load a bottle application from a module and make sure that the import - does not affect the current default application, but returns a separate - application object. See :func:`load` for the target parameter.""" + """ Load a bottle application from a module and make sure that the import + does not affect the current default application, but returns a separate + application object. See :func:`load` for the target parameter. """ global NORUN NORUN, nr_old = True, NORUN tmp = default_app.push() # Create a new "default application" @@ -4035,40 +3881,35 @@ def load_app(target): _debug = debug -def run( - app=None, - server='wsgiref', - host='127.0.0.1', - port=8080, - interval=1, - reloader=False, - quiet=False, - plugins=None, - debug=None, - config=None, - **kargs, -): - """Start a server instance. This method blocks until the server terminates. - - :param app: WSGI application or target string supported by - :func:`load_app`. (default: :func:`default_app`) - :param server: Server adapter to use. See :data:`server_names` keys - for valid names or pass a :class:`ServerAdapter` subclass. - (default: `wsgiref`) - :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on - all interfaces including the external one. 
(default: 127.0.0.1) - :param port: Server port to bind to. Values below 1024 require root - privileges. (default: 8080) - :param reloader: Start auto-reloading server? (default: False) - :param interval: Auto-reloader interval in seconds (default: 1) - :param quiet: Suppress output to stdout and stderr? (default: False) - :param options: Options passed to the server adapter. - """ - if NORUN: - return +def run(app=None, + server='wsgiref', + host='127.0.0.1', + port=8080, + interval=1, + reloader=False, + quiet=False, + plugins=None, + debug=None, + config=None, **kargs): + """ Start a server instance. This method blocks until the server terminates. + + :param app: WSGI application or target string supported by + :func:`load_app`. (default: :func:`default_app`) + :param server: Server adapter to use. See :data:`server_names` keys + for valid names or pass a :class:`ServerAdapter` subclass. + (default: `wsgiref`) + :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on + all interfaces including the external one. (default: 127.0.0.1) + :param port: Server port to bind to. Values below 1024 require root + privileges. (default: 8080) + :param reloader: Start auto-reloading server? (default: False) + :param interval: Auto-reloader interval in seconds (default: 1) + :param quiet: Suppress output to stdout and stderr? (default: False) + :param options: Options passed to the server adapter. + """ + if NORUN: return if reloader and not os.environ.get('BOTTLE_CHILD'): import subprocess - fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock') environ = os.environ.copy() environ['BOTTLE_CHILD'] = 'true' @@ -4077,7 +3918,7 @@ def run( # If a package was loaded with `python -m`, then `sys.argv` needs to be # restored to the original value, or imports might break. 
See #1336 if getattr(sys.modules.get('__main__'), '__package__', None): - args[1:1] = ['-m', sys.modules['__main__'].__package__] + args[1:1] = ["-m", sys.modules['__main__'].__package__] try: os.close(fd) # We never write to this file @@ -4097,13 +3938,12 @@ def run( return try: - if debug is not None: - _debug(debug) + if debug is not None: _debug(debug) app = app or default_app() if isinstance(app, basestring): app = load_app(app) if not callable(app): - raise ValueError('Application is not callable: %r' % app) + raise ValueError("Application is not callable: %r" % app) for plugin in plugins or []: if isinstance(plugin, basestring): @@ -4120,16 +3960,18 @@ def run( if isinstance(server, type): server = server(host=host, port=port, **kargs) if not isinstance(server, ServerAdapter): - raise ValueError('Unknown or unsupported server: %r' % server) + raise ValueError("Unknown or unsupported server: %r" % server) server.quiet = server.quiet or quiet if not server.quiet: - _stderr('Bottle v%s server starting up (using %s)...' % (__version__, repr(server))) - if server.host.startswith('unix:'): - _stderr('Listening on %s' % server.host) + _stderr("Bottle v%s server starting up (using %s)..." 
% + (__version__, repr(server))) + if server.host.startswith("unix:"): + _stderr("Listening on %s" % server.host) else: - _stderr('Listening on http://%s:%d/' % (server.host, server.port)) - _stderr('Hit Ctrl-C to quit.\n') + _stderr("Listening on http://%s:%d/" % + (server.host, server.port)) + _stderr("Hit Ctrl-C to quit.\n") if reloader: lockfile = os.environ.get('BOTTLE_LOCKFILE') @@ -4144,9 +3986,8 @@ def run( pass except (SystemExit, MemoryError): raise - except Exception: - if not reloader: - raise + except: + if not reloader: raise if not getattr(server, 'quiet', quiet): print_exc() time.sleep(interval) @@ -4154,8 +3995,8 @@ def run( class FileCheckerThread(threading.Thread): - """Interrupt main-thread as soon as a changed module file is detected, - the lockfile gets deleted or gets too old.""" + """ Interrupt main-thread as soon as a changed module file is detected, + the lockfile gets deleted or gets too old. """ def __init__(self, lockfile, interval): threading.Thread.__init__(self) @@ -4166,21 +4007,17 @@ def __init__(self, lockfile, interval): def run(self): exists = os.path.exists - - def mtime(p): - return os.stat(p).st_mtime - + mtime = lambda p: os.stat(p).st_mtime files = dict() for module in list(sys.modules.values()): path = getattr(module, '__file__', '') or '' - if path[-4:] in ('.pyo', '.pyc'): - path = path[:-1] - if path and exists(path): - files[path] = mtime(path) + if path[-4:] in ('.pyo', '.pyc'): path = path[:-1] + if path and exists(path): files[path] = mtime(path) while not self.status: - if not exists(self.lockfile) or mtime(self.lockfile) < time.time() - self.interval - 5: + if not exists(self.lockfile)\ + or mtime(self.lockfile) < time.time() - self.interval - 5: self.status = 'error' thread.interrupt_main() for path, lmtime in list(files.items()): @@ -4194,12 +4031,10 @@ def __enter__(self): self.start() def __exit__(self, exc_type, *_): - if not self.status: - self.status = 'exit' # silent exit + if not self.status: self.status 
= 'exit' # silent exit self.join() return exc_type is not None and issubclass(exc_type, KeyboardInterrupt) - ############################################################################### # Template Adapters ############################################################ ############################################################################### @@ -4210,14 +4045,17 @@ class TemplateError(BottleException): class BaseTemplate(object): - """Base class and minimal API for template adapters""" - + """ Base class and minimal API for template adapters """ extensions = ['tpl', 'html', 'thtml', 'stpl'] - settings = {} # used in prepare() - defaults = {} # used in render() - - def __init__(self, source=None, name=None, lookup=None, encoding='utf8', **settings): - """Create a new template. + settings = {} #used in prepare() + defaults = {} #used in render() + + def __init__(self, + source=None, + name=None, + lookup=None, + encoding='utf8', **settings): + """ Create a new template. If the source parameter (str or buffer) is missing, the name argument is used to guess a template filename. Subclasses can assume that self.source and/or self.filename are set. Both are strings. @@ -4244,33 +4082,27 @@ def __init__(self, source=None, name=None, lookup=None, encoding='utf8', **setti @classmethod def search(cls, name, lookup=None): - """Search name in all directories specified in lookup. - First without, then with common extensions. Return first hit.""" + """ Search name in all directories specified in lookup. + First without, then with common extensions. Return first hit. 
""" if not lookup: - raise depr(0, 12, 'Empty template lookup path.', 'Configure a template lookup path.') + raise depr(0, 12, "Empty template lookup path.", "Configure a template lookup path.") if os.path.isabs(name): - raise depr( - 0, - 12, - 'Use of absolute path for template name.', - 'Refer to templates with names or paths relative to the lookup path.', - ) + raise depr(0, 12, "Use of absolute path for template name.", + "Refer to templates with names or paths relative to the lookup path.") for spath in lookup: spath = os.path.abspath(spath) + os.sep fname = os.path.abspath(os.path.join(spath, name)) - if not fname.startswith(spath): - continue - if os.path.isfile(fname): - return fname + if not fname.startswith(spath): continue + if os.path.isfile(fname): return fname for ext in cls.extensions: if os.path.isfile('%s.%s' % (fname, ext)): return '%s.%s' % (fname, ext) @classmethod def global_config(cls, key, *args): - """This reads or sets the global settings stored in class.settings.""" + """ This reads or sets the global settings stored in class.settings. """ if args: cls.settings = cls.settings.copy() # Make settings local to class cls.settings[key] = args[0] @@ -4278,14 +4110,14 @@ def global_config(cls, key, *args): return cls.settings[key] def prepare(self, **options): - """Run preparations (parsing, caching, ...). + """ Run preparations (parsing, caching, ...). It should be possible to call this again to refresh a template or to update settings. """ raise NotImplementedError def render(self, *args, **kwargs): - """Render the template with the specified local variables and return + """ Render the template with the specified local variables and return a single byte or unicode string. If it is a byte string, the encoding must match self.encoding. This method must be thread-safe! 
Local variables may be provided in dictionaries (args) @@ -4296,16 +4128,17 @@ def render(self, *args, **kwargs): class MakoTemplate(BaseTemplate): def prepare(self, **options): - from mako.lookup import TemplateLookup from mako.template import Template - + from mako.lookup import TemplateLookup options.update({'input_encoding': self.encoding}) options.setdefault('format_exceptions', bool(DEBUG)) lookup = TemplateLookup(directories=self.lookup, **options) if self.source: self.tpl = Template(self.source, lookup=lookup, **options) else: - self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) + self.tpl = Template(uri=self.name, + filename=self.filename, + lookup=lookup, **options) def render(self, *args, **kwargs): for dictarg in args: @@ -4318,7 +4151,6 @@ def render(self, *args, **kwargs): class CheetahTemplate(BaseTemplate): def prepare(self, **options): from Cheetah.Template import Template - self.context = threading.local() self.context.vars = {} options['searchList'] = [self.context.vars] @@ -4340,14 +4172,10 @@ def render(self, *args, **kwargs): class Jinja2Template(BaseTemplate): def prepare(self, filters=None, tests=None, globals={}, **kwargs): from jinja2 import Environment, FunctionLoader - self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) - if filters: - self.env.filters.update(filters) - if tests: - self.env.tests.update(tests) - if globals: - self.env.globals.update(globals) + if filters: self.env.filters.update(filters) + if tests: self.env.tests.update(tests) + if globals: self.env.globals.update(globals) if self.source: self.tpl = self.env.from_string(self.source) else: @@ -4365,14 +4193,16 @@ def loader(self, name): fname = name else: fname = self.search(name, self.lookup) - if not fname: - return - with open(fname, 'rb') as f: + if not fname: return + with open(fname, "rb") as f: return (f.read().decode(self.encoding), fname, lambda: False) class SimpleTemplate(BaseTemplate): - def prepare(self, 
escape_func=html_escape, noescape=False, syntax=None, **ka): + def prepare(self, + escape_func=html_escape, + noescape=False, + syntax=None, **ka): self.cache = {} enc = self.encoding self._str = lambda x: touni(x, enc) @@ -4413,30 +4243,28 @@ def _include(self, _env, _name=None, **kwargs): def execute(self, _stdout, kwargs): env = self.defaults.copy() env.update(kwargs) - env.update( - { - '_stdout': _stdout, - '_printlist': _stdout.extend, - 'include': functools.partial(self._include, env), - 'rebase': functools.partial(self._rebase, env), - '_rebase': None, - '_str': self._str, - '_escape': self._escape, - 'get': env.get, - 'setdefault': env.setdefault, - 'defined': env.__contains__, - } - ) + env.update({ + '_stdout': _stdout, + '_printlist': _stdout.extend, + 'include': functools.partial(self._include, env), + 'rebase': functools.partial(self._rebase, env), + '_rebase': None, + '_str': self._str, + '_escape': self._escape, + 'get': env.get, + 'setdefault': env.setdefault, + 'defined': env.__contains__ + }) exec(self.co, env) if env.get('_rebase'): subtpl, rargs = env.pop('_rebase') - rargs['base'] = ''.join(_stdout) # copy stdout + rargs['base'] = ''.join(_stdout) #copy stdout del _stdout[:] # clear stdout return self._include(env, subtpl, **rargs) return env def render(self, *args, **kwargs): - """Render the template using keyword arguments as local variables.""" + """ Render the template using keyword arguments as local variables. """ env = {} stdout = [] for dictarg in args: @@ -4451,14 +4279,13 @@ class StplSyntaxError(TemplateError): class StplParser(object): - """Parser for stpl templates.""" - + """ Parser for stpl templates. """ _re_cache = {} #: Cache for compiled re patterns # This huge pile of voodoo magic splits python code into 8 different tokens. 
# We use the verbose (?x) regex mode to make this more manageable - _re_tok = r"""( + _re_tok = r'''( [urbURB]* (?: ''(?!') |""(?!") @@ -4469,11 +4296,11 @@ class StplParser(object): |'{3}(?:[^\\]|\\.|\n)+?'{3} |"{3}(?:[^\\]|\\.|\n)+?"{3} ) - )""" + )''' _re_inl = _re_tok.replace(r'|\n', '') # We re-use this string pattern later - _re_tok += r""" + _re_tok += r''' # 2: Comments (until end of line, but not the newline itself) |(\#.*) @@ -4493,18 +4320,19 @@ class StplParser(object): # 9: And finally, a single newline. The 10th token is 'everything else' |(\r?\n) - """ + ''' # Match the start tokens of code areas in a template - _re_split = r"""(?m)^[ \t]*(\\?)((%(line_start)s)|(%(block_start)s))""" + _re_split = r'''(?m)^[ \t]*(\\?)((%(line_start)s)|(%(block_start)s))''' # Match inline statements (may contain python strings) - _re_inl = r"""%%(inline_start)s((?:%s|[^'"\n])*?)%%(inline_end)s""" % _re_inl + _re_inl = r'''%%(inline_start)s((?:%s|[^'"\n])*?)%%(inline_end)s''' % _re_inl # add the flag in front of the regexp to avoid Deprecation warning (see Issue #949) # verbose and dot-matches-newline mode _re_tok = '(?mx)' + _re_tok _re_inl = '(?mx)' + _re_inl + default_syntax = '<% %> % {{ }}' def __init__(self, source, syntax=None, encoding='utf8'): @@ -4516,7 +4344,7 @@ def __init__(self, source, syntax=None, encoding='utf8'): self.paren_depth = 0 def get_syntax(self): - """Tokens as a space separated string (default: <% %> % {{ }})""" + """ Tokens as a space separated string (default: <% %> % {{ }}) """ return self._syntax def set_syntax(self, syntax): @@ -4534,24 +4362,25 @@ def set_syntax(self, syntax): syntax = property(get_syntax, set_syntax) def translate(self): - if self.offset: - raise RuntimeError('Parser is a one time instance.') + if self.offset: raise RuntimeError('Parser is a one time instance.') while True: m = self.re_split.search(self.source, pos=self.offset) if m: - text = self.source[self.offset : m.start()] + text = 
self.source[self.offset:m.start()] self.text_buffer.append(text) self.offset = m.end() if m.group(1): # Escape syntax - line, sep, _ = self.source[self.offset :].partition('\n') - self.text_buffer.append(self.source[m.start() : m.start(1)] + m.group(2) + line + sep) + line, sep, _ = self.source[self.offset:].partition('\n') + self.text_buffer.append(self.source[m.start():m.start(1)] + + m.group(2) + line + sep) self.offset += len(line + sep) continue self.flush_text() - self.offset += self.read_code(self.source[self.offset :], multiline=bool(m.group(4))) + self.offset += self.read_code(self.source[self.offset:], + multiline=bool(m.group(4))) else: break - self.text_buffer.append(self.source[self.offset :]) + self.text_buffer.append(self.source[self.offset:]) self.flush_text() return ''.join(self.code_buffer) @@ -4565,7 +4394,7 @@ def read_code(self, pysource, multiline): offset = len(pysource) self.write_code(code_line.strip(), comment) break - code_line += pysource[offset : m.start()] + code_line += pysource[offset:m.start()] offset = m.end() _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups() if self.paren_depth > 0 and (_blk1 or _blk2): # a if b else c @@ -4594,10 +4423,8 @@ def read_code(self, pysource, multiline): code_line = _blk2 self.indent_mod -= 1 elif _cend: # The end-code-block template token (usually '%>') - if multiline: - multiline = False - else: - code_line += _cend + if multiline: multiline = False + else: code_line += _cend elif _end: self.indent -= 1 self.indent_mod += 1 @@ -4613,23 +4440,19 @@ def read_code(self, pysource, multiline): def flush_text(self): text = ''.join(self.text_buffer) del self.text_buffer[:] - if not text: - return + if not text: return parts, pos, nl = [], 0, '\\\n' + ' ' * self.indent for m in self.re_inl.finditer(text): - prefix, pos = text[pos : m.start()], m.end() + prefix, pos = text[pos:m.start()], m.end() if prefix: parts.append(nl.join(map(repr, prefix.splitlines(True)))) - if prefix.endswith('\n'): - 
parts[-1] += nl + if prefix.endswith('\n'): parts[-1] += nl parts.append(self.process_inline(m.group(1).strip())) if pos < len(text): prefix = text[pos:] lines = prefix.splitlines(True) - if lines[-1].endswith('\\\\\n'): - lines[-1] = lines[-1][:-3] - elif lines[-1].endswith('\\\\\r\n'): - lines[-1] = lines[-1][:-4] + if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3] + elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4] parts.append(nl.join(map(repr, lines))) code = '_printlist((%s,))' % ', '.join(parts) self.lineno += code.count('\n') + 1 @@ -4637,8 +4460,7 @@ def flush_text(self): @staticmethod def process_inline(chunk): - if chunk[0] == '!': - return '_str(%s)' % chunk[1:] + if chunk[0] == '!': return '_str(%s)' % chunk[1:] return '_escape(%s)' % chunk def write_code(self, line, comment=''): @@ -4664,9 +4486,8 @@ def template(*args, **kwargs): settings = kwargs.pop('template_settings', {}) if isinstance(tpl, adapter): TEMPLATES[tplid] = tpl - if settings: - TEMPLATES[tplid].prepare(**settings) - elif '\n' in tpl or '{' in tpl or '%' in tpl or '$' in tpl: + if settings: TEMPLATES[tplid].prepare(**settings) + elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl: TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings) else: TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings) @@ -4676,22 +4497,24 @@ def template(*args, **kwargs): mako_template = functools.partial(template, template_adapter=MakoTemplate) -cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) +cheetah_template = functools.partial(template, + template_adapter=CheetahTemplate) jinja2_template = functools.partial(template, template_adapter=Jinja2Template) def view(tpl_name, **defaults): - """Decorator: renders a template for a handler. 
- The handler can control its behavior like that: - - - return a dict of template vars to fill out the template - - return something other than a dict and the view decorator will not - process the template, but return the handler result as is. - This includes returning a HTTPResponse(dict) to get, - for instance, JSON with autojson or other castfilters. + """ Decorator: renders a template for a handler. + The handler can control its behavior like that: + + - return a dict of template vars to fill out the template + - return something other than a dict and the view decorator will not + process the template, but return the handler result as is. + This includes returning a HTTPResponse(dict) to get, + for instance, JSON with autojson or other castfilters. """ def decorator(func): + @functools.wraps(func) def wrapper(*args, **kwargs): result = func(*args, **kwargs) @@ -4724,16 +4547,16 @@ def wrapper(*args, **kwargs): #: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') HTTP_CODES = httplib.responses.copy() HTTP_CODES[418] = "I'm a teapot" # RFC 2324 -HTTP_CODES[428] = 'Precondition Required' -HTTP_CODES[429] = 'Too Many Requests' -HTTP_CODES[431] = 'Request Header Fields Too Large' -HTTP_CODES[451] = 'Unavailable For Legal Reasons' # RFC 7725 -HTTP_CODES[511] = 'Network Authentication Required' -_HTTP_STATUS_LINES = dict((k, '%d %s' % (k, v)) for (k, v) in HTTP_CODES.items()) +HTTP_CODES[428] = "Precondition Required" +HTTP_CODES[429] = "Too Many Requests" +HTTP_CODES[431] = "Request Header Fields Too Large" +HTTP_CODES[451] = "Unavailable For Legal Reasons" # RFC 7725 +HTTP_CODES[511] = "Network Authentication Required" +_HTTP_STATUS_LINES = dict((k, '%d %s' % (k, v)) + for (k, v) in HTTP_CODES.items()) #: The default template used for error pages. 
Override with @error() -ERROR_PAGE_TEMPLATE = ( - """ +ERROR_PAGE_TEMPLATE = """ %%try: %%from %s import DEBUG, request @@ -4771,9 +4594,7 @@ def wrapper(*args, **kwargs): ImportError: Could not generate the error page. Please add bottle to the import path. %%end -""" - % __name__ -) +""" % __name__ #: A thread-safe instance of :class:`LocalRequest`. If accessed from within a #: request callback, this instance always refers to the *current* request @@ -4793,7 +4614,8 @@ def wrapper(*args, **kwargs): #: A virtual package that redirects import statements. #: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`. -ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__ + '.ext', 'bottle_%s').module +ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else + __name__ + ".ext", 'bottle_%s').module def _main(argv): # pragma: no coverage @@ -4808,7 +4630,7 @@ def _cli_error(cli_msg): print('Bottle %s' % __version__) sys.exit(0) if not args.app: - _cli_error('No application entry point specified.') + _cli_error("No application entry point specified.") sys.path.insert(0, '.') sys.modules.setdefault('bottle', sys.modules['__main__']) @@ -4830,9 +4652,9 @@ def _cli_error(cli_msg): except configparser.Error as parse_error: _cli_error(parse_error) except IOError: - _cli_error('Unable to read config file %r' % cfile) + _cli_error("Unable to read config file %r" % cfile) except (UnicodeError, TypeError, ValueError) as error: - _cli_error('Unable to parse config file %r: %s' % (cfile, error)) + _cli_error("Unable to parse config file %r: %s" % (cfile, error)) for cval in args.param or []: if '=' in cval: @@ -4840,16 +4662,14 @@ def _cli_error(cli_msg): else: config[cval] = True - run( - args.app, + run(args.app, host=host, port=int(port), server=args.server, reloader=args.reload, plugins=args.plugin, debug=args.debug, - config=config, - ) + config=config) def main(): diff --git a/build/mongodl.py b/build/mongodl.py index 
c5ac978fa47..b904ea0eb29 100644 --- a/build/mongodl.py +++ b/build/mongodl.py @@ -3,7 +3,6 @@ Use '--help' for more information. """ - import argparse import enum import hashlib @@ -40,14 +39,14 @@ DISTRO_VERSION_MAP = { 'elementary': { - '6': '20.04', + '6': '20.04' }, 'fedora': { '32': '8', '33': '8', '34': '8', '35': '8', - '36': '8', + '36': '8' }, 'linuxmint': { '19': '18.04', @@ -58,7 +57,7 @@ '20.1': '20.04', '20.2': '20.04', '20.3': '20.04', - '21': '22.04', + '21': '22.04' }, } @@ -107,7 +106,8 @@ def infer_target(): for c in cands: if c.is_file(): return _infer_target_os_rel(c) - raise RuntimeError("We don't know how to find the default '--target' option for this system. Please contribute!") + raise RuntimeError("We don't know how to find the default '--target'" + " option for this system. Please contribute!") def _infer_target_os_rel(os_rel_path: Path): @@ -115,7 +115,8 @@ def _infer_target_os_rel(os_rel_path: Path): content = f.read() id_re = re.compile(r'\bID=("?)(.*)\1') mat = id_re.search(content) - assert mat, 'Unable to detect ID from [{}] content:\n{}'.format(os_rel_path, content) + assert mat, 'Unable to detect ID from [{}] content:\n{}'.format( + os_rel_path, content) os_id = mat.group(2) if os_id == 'arch': # There are no Archlinux-specific MongoDB downloads, so we'll just use @@ -124,7 +125,8 @@ def _infer_target_os_rel(os_rel_path: Path): return 'rhel80' ver_id_re = re.compile(r'VERSION_ID=("?)(.*)\1') mat = ver_id_re.search(content) - assert mat, 'Unable to detect VERSION_ID from [{}] content:\n{}'.format(os_rel_path, content) + assert mat, 'Unable to detect VERSION_ID from [{}] content:\n{}'.format( + os_rel_path, content) ver_id = mat.group(2) mapped_id = DISTRO_ID_MAP.get(os_id) if mapped_id: @@ -132,23 +134,21 @@ def _infer_target_os_rel(os_rel_path: Path): ver_mapper = DISTRO_VERSION_MAP.get(os_id) if ver_mapper: mapped_version = ver_mapper[ver_id] - print('Mapping version "{}" to "{}"'.format(ver_id, mapped_version)) + print('Mapping 
version "{}" to "{}"'.format( + ver_id, mapped_version)) ver_id = mapped_version os_id = mapped_id os_id = os_id.lower() if os_id not in DISTRO_ID_TO_TARGET: - raise RuntimeError( - "We don't know how to map '{}' to a distribution download target. Please contribute!".format(os_id) - ) + raise RuntimeError("We don't know how to map '{}' to a distribution " + "download target. Please contribute!".format(os_id)) ver_table = DISTRO_ID_TO_TARGET[os_id] for pattern, target in ver_table.items(): if fnmatch(ver_id, pattern): return target raise RuntimeError( - "We don't know how to map '{}' version '{}' to a distribution download target. Please contribute!".format( - os_id, ver_id - ) - ) + "We don't know how to map '{}' version '{}' to a distribution " + "download target. Please contribute!".format(os_id, ver_id)) def caches_root(): @@ -180,15 +180,15 @@ def _import_json_data(db, json_file): db.execute('DROP TABLE IF EXISTS components') db.execute('DROP TABLE IF EXISTS downloads') db.execute('DROP TABLE IF EXISTS versions') - db.execute(r""" + db.execute(r''' CREATE TABLE versions ( version_id INTEGER PRIMARY KEY, date TEXT NOT NULL, version TEXT NOT NULL, githash TEXT NOT NULL ) - """) - db.execute(r""" + ''') + db.execute(r''' CREATE TABLE downloads ( download_id INTEGER PRIMARY KEY, version_id INTEGER NOT NULL REFERENCES versions, @@ -198,8 +198,8 @@ def _import_json_data(db, json_file): ar_url TEST NOT NULL, data TEXT NOT NULL ) - """) - db.execute(r""" + ''') + db.execute(r''' CREATE TABLE components ( component_id INTEGER PRIMARY KEY, key TEXT NOT NULL, @@ -207,7 +207,7 @@ def _import_json_data(db, json_file): data TEXT NOT NULL, UNIQUE(key, download_id) ) - """) + ''') with json_file.open('r') as f: data = json.load(f) for ver in data['versions']: @@ -215,10 +215,10 @@ def _import_json_data(db, json_file): githash = ver['githash'] date = ver['date'] db.execute( - r""" + r''' INSERT INTO versions (date, version, githash) VALUES (?, ?, ?) 
- """, + ''', (date, version, githash), ) version_id = db.lastrowid @@ -228,10 +228,10 @@ def _import_json_data(db, json_file): edition = dl['edition'] ar_url = dl['archive']['url'] db.execute( - r""" + r''' INSERT INTO downloads (version_id, target, arch, edition, ar_url, data) VALUES (?, ?, ?, ?, ?, ?) - """, + ''', (version_id, target, arch, edition, ar_url, json.dumps(dl)), ) dl_id = db.lastrowid @@ -239,10 +239,10 @@ def _import_json_data(db, json_file): if 'url' not in data: continue db.execute( - r""" + r''' INSERT INTO components (key, download_id, data) VALUES (?, ?, ?) - """, + ''', (key, dl_id, json.dumps(data)), ) @@ -269,26 +269,27 @@ def get_dl_db(): caches = cache_dir() _mkdir(caches) db = sqlite3.connect(str(caches / 'downloads.db'), isolation_level=None) - db.executescript(r""" + db.executescript(r''' CREATE TABLE IF NOT EXISTS meta ( etag TEXT, last_modified TEXT ) - """) - db.executescript(r""" + ''') + db.executescript(r''' CREATE TABLE IF NOT EXISTS past_downloads ( url TEXT NOT NULL UNIQUE, etag TEXT, last_modified TEXT ) - """) - changed, full_json = _download_file(db, 'https://downloads.mongodb.org/full.json') + ''') + changed, full_json = _download_file( + db, 'https://downloads.mongodb.org/full.json') if not changed: return db with db: print('Refreshing downloads manifest ...') cur = db.cursor() - cur.execute('begin') + cur.execute("begin") _import_json_data(cur, full_json) return db @@ -296,7 +297,7 @@ def get_dl_db(): def _print_list(db, version, target, arch, edition, component): if version or target or arch or edition or component: matching = db.execute( - r""" + r''' SELECT version, target, arch, edition, key, components.data FROM components, downloads USING(download_id), @@ -306,24 +307,27 @@ def _print_list(db, version, target, arch, edition, component): AND (:arch IS NULL OR arch=:arch) AND (:edition IS NULL OR edition=:edition) AND (:version IS NULL OR version=:version) - """, - dict(version=version, target=target, arch=arch, 
edition=edition, component=component), + ''', + dict(version=version, + target=target, + arch=arch, + edition=edition, + component=component), ) for version, target, arch, edition, comp_key, comp_data in matching: - print( - 'Download: {}\n\n' - ' Version: {}\n\n' - ' Target: {}\n\n' - ' Arch: {}\n\n' - ' Edition: {}\n\n' - ' Info: {}\n\n'.format(comp_key, version, target, arch, edition, comp_data) - ) + print('Download: {}\n\n' + ' Version: {}\n\n' + ' Target: {}\n\n' + ' Arch: {}\n\n' + ' Edition: {}\n\n' + ' Info: {}\n\n'.format(comp_key, version, target, arch, + edition, comp_data)) print('(Omit filter arguments for a list of available filters)') return arches, targets, editions, versions, components = next( iter( - db.execute(r""" + db.execute(r''' VALUES( (select group_concat(arch, ', ') from (select distinct arch from downloads)), (select group_concat(target, ', ') from (select distinct target from downloads)), @@ -331,16 +335,27 @@ def _print_list(db, version, target, arch, edition, component): (select group_concat(version, ', ') from (select distinct version from versions)), (select group_concat(key, ', ') from (select distinct key from components)) ) - """) - ) - ) - versions = '\n'.join(textwrap.wrap(versions, width=78, initial_indent=' ', subsequent_indent=' ')) - targets = '\n'.join(textwrap.wrap(targets, width=78, initial_indent=' ', subsequent_indent=' ')) - print( - 'Architectures:\n {}\nTargets:\n{}\nEditions:\n {}\nVersions:\n{}\nComponents:\n {}\n'.format( - arches, targets, editions, versions, components - ) - ) + '''))) + versions = '\n'.join( + textwrap.wrap(versions, + width=78, + initial_indent=' ', + subsequent_indent=' ')) + targets = '\n'.join( + textwrap.wrap(targets, + width=78, + initial_indent=' ', + subsequent_indent=' ')) + print('Architectures:\n' + ' {}\n' + 'Targets:\n' + '{}\n' + 'Editions:\n' + ' {}\n' + 'Versions:\n' + '{}\n' + 'Components:\n' + ' {}\n'.format(arches, targets, editions, versions, components)) def infer_arch(): 
@@ -356,7 +371,10 @@ def infer_arch(): def _download_file(db, url): caches = cache_dir() - info = list(db.execute('SELECT etag, last_modified FROM past_downloads WHERE url=?', [url])) + info = list( + db.execute( + 'SELECT etag, last_modified FROM past_downloads WHERE url=?', + [url])) etag = None modtime = None if info: @@ -367,7 +385,7 @@ def _download_file(db, url): if modtime: headers['If-Modified-Since'] = modtime req = urllib.request.Request(url, headers=headers) - digest = hashlib.md5(url.encode('utf-8')).hexdigest()[:4] + digest = hashlib.md5(url.encode("utf-8")).hexdigest()[:4] dest = caches / 'files' / digest / PurePosixPath(url).name try: resp = urllib.request.urlopen(req) @@ -378,7 +396,7 @@ def _download_file(db, url): else: print('Downloading [{}] ...'.format(url)) _mkdir(dest.parent) - got_etag = resp.getheader('ETag') + got_etag = resp.getheader("ETag") got_modtime = resp.getheader('Last-Modified') with dest.open('wb') as of: buf = resp.read(1024 * 1024 * 4) @@ -387,15 +405,16 @@ def _download_file(db, url): buf = resp.read(1024 * 1024 * 4) db.execute( 'INSERT OR REPLACE INTO past_downloads (url, etag, last_modified) VALUES (?, ?, ?)', - (url, got_etag, got_modtime), - ) + (url, got_etag, got_modtime)) return DLRes(True, dest) -def _dl_component(db, out_dir, version, target, arch, edition, component, pattern, strip_components, test): - print('Download {} v{}-{} for {}-{}'.format(component, version, edition, target, arch)) +def _dl_component(db, out_dir, version, target, arch, edition, component, + pattern, strip_components, test): + print('Download {} v{}-{} for {}-{}'.format(component, version, edition, + target, arch)) matching = db.execute( - r""" + r''' SELECT components.data FROM components, @@ -407,17 +426,26 @@ def _dl_component(db, out_dir, version, target, arch, edition, component, patter AND edition=:edition AND version=:version AND key=:component - """, - dict(version=version, target=target, arch=arch, edition=edition, 
component=component), + ''', + dict(version=version, + target=target, + arch=arch, + edition=edition, + component=component), ) found = list(matching) if not found: raise ValueError( - 'No download for "{}" was found for the requested version+target+architecture+edition'.format(component) - ) + 'No download for "{}" was found for ' + 'the requested version+target+architecture+edition'.format( + component)) data = json.loads(found[0][0]) cached = _download_file(db, data['url']).path - return _expand_archive(cached, out_dir, pattern, strip_components, test=test) + return _expand_archive(cached, + out_dir, + pattern, + strip_components, + test=test) def pathjoin(items): @@ -468,41 +496,43 @@ class ExpandResult(enum.Enum): def _expand_archive(ar, dest, pattern, strip_components, test): - """ + ''' Expand the archive members from 'ar' into 'dest'. If 'pattern' is not-None, only extracts members that match the pattern. - """ + ''' print('Extract from: [{}]'.format(ar.name)) print(' into: [{}]'.format(dest)) if ar.suffix == '.zip': - n_extracted = _expand_zip(ar, dest, pattern, strip_components, test=test) + n_extracted = _expand_zip(ar, + dest, + pattern, + strip_components, + test=test) elif ar.suffix == '.tgz': - n_extracted = _expand_tgz(ar, dest, pattern, strip_components, test=test) + n_extracted = _expand_tgz(ar, + dest, + pattern, + strip_components, + test=test) else: raise RuntimeError('Unknown archive file extension: ' + ar.suffix) verb = 'would be' if test else 'were' if n_extracted == 0: if pattern and strip_components: - print( - 'NOTE: No files {verb} extracted. Likely all files {verb} ' - 'excluded by "--only={p}" and/or "--strip-components={s}"'.format( - p=pattern, s=strip_components, verb=verb - ) - ) + print('NOTE: No files {verb} extracted. Likely all files {verb} ' + 'excluded by "--only={p}" and/or "--strip-components={s}"'. + format(p=pattern, s=strip_components, verb=verb)) elif pattern: - print( - 'NOTE: No files {verb} extracted. 
Likely all files {verb} excluded by the "--only={p}" filter'.format( - p=pattern, verb=verb - ) - ) + print('NOTE: No files {verb} extracted. Likely all files {verb} ' + 'excluded by the "--only={p}" filter'.format(p=pattern, + verb=verb)) elif strip_components: - print( - 'NOTE: No files {verb} extracted. Likely all files {verb} excluded by "--strip-components={s}"'.format( - s=strip_components, verb=verb - ) - ) + print('NOTE: No files {verb} extracted. Likely all files {verb} ' + 'excluded by "--strip-components={s}"'.format( + s=strip_components, verb=verb)) else: - print('NOTE: No files {verb} extracted. Empty archive?'.format(verb=verb)) + print('NOTE: No files {verb} extracted. Empty archive?'.format( + verb=verb)) return ExpandResult.Empty elif n_extracted == 1: print('One file {v} extracted'.format(v='would be' if test else 'was')) @@ -513,7 +543,7 @@ def _expand_archive(ar, dest, pattern, strip_components, test): def _expand_tgz(ar, dest, pattern, strip_components, test): - "Expand a tar.gz archive" + 'Expand a tar.gz archive' n_extracted = 0 with tarfile.open(str(ar), 'r:*') as tf: for mem in tf.getmembers(): @@ -531,7 +561,7 @@ def _expand_tgz(ar, dest, pattern, strip_components, test): def _expand_zip(ar, dest, pattern, strip_components, test): - "Expand a .zip archive." + 'Expand a .zip archive.' n_extracted = 0 with zipfile.ZipFile(ar, 'r') as zf: for item in zf.infolist(): @@ -548,7 +578,8 @@ def _expand_zip(ar, dest, pattern, strip_components, test): return n_extracted -def _maybe_extract_member(out, relpath, pattern, strip, is_dir, opener, modebits, test): +def _maybe_extract_member(out, relpath, pattern, strip, is_dir, opener, + modebits, test): """ Try to extract an archive member according to the given arguments. 
@@ -584,42 +615,48 @@ def _maybe_extract_member(out, relpath, pattern, strip, is_dir, opener, modebits def main(): parser = argparse.ArgumentParser(description=__doc__) grp = parser.add_argument_group('List arguments') - grp.add_argument( - '--list', - action='store_true', - help='List available components, targets, editions, and architectures. Download arguments will act as filters.', - ) + grp.add_argument('--list', + action='store_true', + help='List available components, targets, editions, and ' + 'architectures. Download arguments will act as filters.') dl_grp = parser.add_argument_group( 'Download arguments', description='Select what to download and extract. ' 'Non-required arguments will be inferred ' - 'based on the host system.', - ) - dl_grp.add_argument( - '--target', '-T', help='The target platform for which to download. Use "--list" to list available targets.' - ) - dl_grp.add_argument('--arch', '-A', help='The architecture for which to download') + 'based on the host system.') + dl_grp.add_argument('--target', + '-T', + help='The target platform for which to download. ' + 'Use "--list" to list available targets.') + dl_grp.add_argument('--arch', + '-A', + help='The architecture for which to download') dl_grp.add_argument( '--edition', '-E', help='The edition of the product to download (Default is "enterprise"). ' - 'Use "--list" to list available editions.', - ) - dl_grp.add_argument('--out', '-o', help='The directory in which to download components. (Required)', type=Path) - dl_grp.add_argument( - '--version', '-V', help='The product version to download (Required). Use "--list" to list available versions.' - ) + 'Use "--list" to list available editions.') dl_grp.add_argument( - '--component', '-C', help='The component to download (Required). Use "--list" to list available components.' - ) + '--out', + '-o', + help='The directory in which to download components. 
(Required)', + type=Path) + dl_grp.add_argument('--version', + '-V', + help='The product version to download (Required). ' + 'Use "--list" to list available versions.') + dl_grp.add_argument('--component', + '-C', + help='The component to download (Required). ' + 'Use "--list" to list available components.') dl_grp.add_argument( '--only', - help='Restrict extraction to items that match the given globbing expression. ' + help= + 'Restrict extraction to items that match the given globbing expression. ' 'The full archive member path is matched, so a pattern like "*.exe" ' 'will only match "*.exe" at the top level of the archive. To match ' 'recursively, use the "**" pattern to match any number of ' - 'intermediate directories.', - ) + 'intermediate directories.') dl_grp.add_argument( '--strip-path-components', '-p', @@ -627,56 +664,55 @@ def main(): metavar='N', default=0, type=int, - help='Strip the given number of path components from archive members before ' + help= + 'Strip the given number of path components from archive members before ' 'extracting into the destination. The relative path of the archive ' 'member will be used to form the destination path. For example, a ' 'member named [bin/mongod.exe] will be extracted to [/bin/mongod.exe]. ' 'Using --strip-components=1 will remove the first path component, extracting ' 'such an item to [/mongod.exe]. If the path has fewer than N components, ' - 'that archive member will be ignored.', - ) + 'that archive member will be ignored.') dl_grp.add_argument( '--test', action='store_true', help='Do not extract or place any files/directories. 
' - 'Only print what will be extracted without placing any files.', - ) - dl_grp.add_argument( - '--empty-is-error', - action='store_true', - help='If all files are excluded by other filters, treat that situation as an error and exit non-zero.', - ) + 'Only print what will be extracted without placing any files.') + dl_grp.add_argument('--empty-is-error', + action='store_true', + help='If all files are excluded by other filters, ' + 'treat that situation as an error and exit non-zero.') args = parser.parse_args() db = get_dl_db() if args.list: - _print_list(db, args.version, args.target, args.arch, args.edition, args.component) + _print_list(db, args.version, args.target, args.arch, args.edition, + args.component) return if args.version is None: raise argparse.ArgumentError(None, 'A "--version" is required') if args.component is None: - raise argparse.ArgumentError(None, 'A "--component" name should be provided') + raise argparse.ArgumentError( + None, 'A "--component" name should be provided') if args.out is None: - raise argparse.ArgumentError(None, 'A "--out" directory should be provided') + raise argparse.ArgumentError(None, + 'A "--out" directory should be provided') target = args.target or infer_target() arch = args.arch or infer_arch() edition = args.edition or 'enterprise' out = args.out or Path.cwd() out = out.absolute() - result = _dl_component( - db, - out, - version=args.version, - target=target, - arch=arch, - edition=edition, - component=args.component, - pattern=args.only, - strip_components=args.strip_components, - test=args.test, - ) + result = _dl_component(db, + out, + version=args.version, + target=target, + arch=arch, + edition=edition, + component=args.component, + pattern=args.only, + strip_components=args.strip_components, + test=args.test) if result is ExpandResult.Empty: return 1 return 0 From 3fde50b7d727f374d21ec59fb7375a336db04585 Mon Sep 17 00:00:00 2001 From: Ezra Chung Date: Thu, 25 Sep 2025 12:53:12 -0500 Subject: [PATCH 20/20] 
Exclude bottle.py and mongodl.py from Python formatting --- tools/ruff-format-all.sh | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tools/ruff-format-all.sh b/tools/ruff-format-all.sh index 4ea8b2621cd..e3321ebb651 100755 --- a/tools/ruff-format-all.sh +++ b/tools/ruff-format-all.sh @@ -10,14 +10,21 @@ set -o errexit set -o pipefail +# Vendored scripts to keep in sync with upstream or external sources. +excludes=( + --exclude build/bottle.py + --exclude build/mongodl.py +) + # Scripts which require a different Python version than the one specified in pyproject.toml. # See: https://github.com/astral-sh/ruff/issues/10457 py312=( src/libbson/tests/validate-tests.py ) -# Python scripts. -# https://github.com/astral-sh/ruff/issues/8232 -uv run --frozen --group format-scripts ruff check --select I --fix --exclude "${py312[@]:?}" +# Format imports: https://github.com/astral-sh/ruff/issues/8232 uv run --frozen --group format-scripts --isolated ruff check --select I --fix --target-version py312 "${py312[@]:?}" -uv run --frozen --group format-scripts ruff format +uv run --frozen --group format-scripts ruff check --select I --fix --exclude "${py312[@]:?}" "${excludes[@]:?}" + +# Format Python scripts. +uv run --frozen --group format-scripts ruff format "${excludes[@]:?}"