diff --git a/Cargo.lock b/Cargo.lock index 18fc55d3e..d7160fcbd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,15 +65,6 @@ dependencies = [ "libc", ] -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - [[package]] name = "anstream" version = "0.6.15" @@ -181,6 +172,12 @@ dependencies = [ "rustc-demangle", ] +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + [[package]] name = "base64" version = "0.13.1" @@ -193,14 +190,20 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + [[package]] name = "basic-cookies" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67bd8fd42c16bdb08688243dc5f0cc117a3ca9efeeaba3a345a18a6159ad96f7" dependencies = [ - "lalrpop 0.20.2", - "lalrpop-util 0.20.2", + "lalrpop", + "lalrpop-util", "regex", ] @@ -327,7 +330,7 @@ dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.1", + "strsim", "terminal_size", ] @@ -355,6 +358,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + [[package]] name = "core-foundation" version = "0.9.4" @@ -404,6 +413,18 @@ 
version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -437,11 +458,38 @@ dependencies = [ "libloading", ] +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.75", +] + [[package]] name = "darling" -version = "0.13.4" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -449,27 +497,27 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.13.4" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", - "strsim 0.10.0", - "syn 1.0.109", + "strsim", + "syn 2.0.75", ] 
[[package]] name = "darling_macro" -version = "0.13.4" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 1.0.109", + "syn 2.0.75", ] [[package]] @@ -478,6 +526,17 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7762d17f1241643615821a8455a0b2c3e803784b058693d990b11f2dce25a0ca" +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + [[package]] name = "deranged" version = "0.3.11" @@ -485,20 +544,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", + "serde", ] -[[package]] -name = "deunicode" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "339544cc9e2c4dc3fc7149fd630c5f22263a4fdf18a98afd0075784968b5cf00" - -[[package]] -name = "diff" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - [[package]] name = "digest" version = "0.10.7" @@ -506,6 +554,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] @@ -531,12 +580,77 @@ dependencies = [ "winapi", ] +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "rfc6979", + "signature", + "spki", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +dependencies = [ + "curve25519-dalek", + "ed25519", + "serde", + "sha2", + "subtle", + "zeroize", +] + [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "hkdf", + "pem-rfc7468", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + [[package]] name = "ena" version = "0.14.3" @@ -635,6 +749,22 @@ dependencies = [ "syslog", ] +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + [[package]] name = "fixedbitset" version = "0.4.2" @@ -743,6 +873,7 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", + "zeroize", ] [[package]] @@ -764,6 +895,17 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + [[package]] name = "h2" version = "0.4.6" @@ -776,13 +918,19 @@ dependencies = [ "futures-core", "futures-sink", "http 1.1.0", - "indexmap", + "indexmap 2.4.0", "slab", "tokio", "tokio-util", "tracing", ] +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version = "0.14.5" @@ -823,6 +971,15 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + [[package]] name = "hmac" version = "0.12.1" @@ -1014,14 +1171,14 @@ dependencies = [ ] [[package]] -name = "impl-trait-for-tuples" -version = "0.2.2" +name = "indexmap" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" 
dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", + "autocfg", + "hashbrown 0.12.3", + "serde", ] [[package]] @@ -1031,7 +1188,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.5", + "serde", ] [[package]] @@ -1108,18 +1266,6 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" -[[package]] -name = "jmespatch" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7acf91a732ade34d8eda2dee9500a051833f14f0d3d10d77c149845d6ac6a5f0" -dependencies = [ - "lazy_static", - "serde", - "serde_json", - "slug", -] - [[package]] name = "js-sys" version = "0.3.70" @@ -1182,7 +1328,6 @@ dependencies = [ "hyper", "hyper-util", "intervaltree", - "jmespatch", "kmip-protocol", "kvx", "libflate", @@ -1190,7 +1335,6 @@ dependencies = [ "once_cell", "openidconnect", "openssl", - "oso", "percent-encoding", "pin-project-lite", "r2d2", @@ -1260,28 +1404,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "lalrpop" -version = "0.19.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" -dependencies = [ - "ascii-canvas", - "bit-set", - "diff", - "ena", - "is-terminal", - "itertools 0.10.5", - "lalrpop-util 0.19.12", - "petgraph", - "regex", - "regex-syntax 0.6.29", - "string_cache", - "term", - "tiny-keccak", - "unicode-xid", -] - [[package]] name = "lalrpop" version = "0.20.2" @@ -1292,11 +1414,11 @@ dependencies = [ "bit-set", "ena", "itertools 0.11.0", - "lalrpop-util 0.20.2", + "lalrpop-util", "petgraph", "pico-args", "regex", - "regex-syntax 0.8.4", + "regex-syntax", "string_cache", "term", "tiny-keccak", @@ -1304,22 +1426,13 @@ dependencies = [ "walkdir", ] 
-[[package]] -name = "lalrpop-util" -version = "0.19.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" -dependencies = [ - "regex", -] - [[package]] name = "lalrpop-util" version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.7", + "regex-automata", ] [[package]] @@ -1327,6 +1440,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "libc" @@ -1354,7 +1470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6e0d73b369f386f1c44abd9c570d5318f55ccde816ff4b562fa452e5182863d" dependencies = [ "core2", - "hashbrown", + "hashbrown 0.14.5", "rle-decode-fast", ] @@ -1368,6 +1484,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + [[package]] name = "libredox" version = "0.1.3" @@ -1400,27 +1522,12 @@ version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" -[[package]] -name = "maplit" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" - [[package]] name = "match_cfg" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" -[[package]] -name = "matchers" -version = "0.0.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1" -dependencies = [ - "regex-automata 0.1.10", -] - [[package]] name = "maybe-async" version = "0.2.10" @@ -1499,13 +1606,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] -name = "num-bigint" -version = "0.4.6" +name = "num-bigint-dig" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" dependencies = [ + "byteorder", + "lazy_static", + "libm", "num-integer", + "num-iter", "num-traits", + "rand", + "smallvec", + "zeroize", ] [[package]] @@ -1523,6 +1637,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -1530,6 +1655,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -1577,19 +1703,23 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "openidconnect" -version = "2.5.1" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98dd5b7049bac4fdd2233b8c9767d42c05da8006fdb79cc903258556d2b18009" +checksum = "f47e80a9cfae4462dd29c41e987edd228971d6565553fbc14b8a11e666d91590" dependencies = [ "base64 0.13.1", "chrono", + "dyn-clone", + "ed25519-dalek", + "hmac", "http 0.2.12", "itertools 0.10.5", "log", - 
"num-bigint", "oauth2", + "p256", + "p384", "rand", - "ring 0.16.20", + "rsa", "serde", "serde-value", "serde_derive", @@ -1597,6 +1727,7 @@ dependencies = [ "serde_path_to_error", "serde_plain", "serde_with", + "sha2", "subtle", "thiserror", "url", @@ -1666,18 +1797,27 @@ dependencies = [ ] [[package]] -name = "oso" -version = "0.12.4" +name = "p256" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aec41e2da1ce3a82eb807396f802c172f08aa03e1be31e5df49592a04e12c8c7" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" dependencies = [ - "impl-trait-for-tuples", - "lazy_static", - "maplit", - "polar-core", - "thiserror", - "tracing", - "tracing-subscriber", + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "p384" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70786f51bcc69f6a4c0360e063a4cac5419ef7c5cd5b3c99ad70f3be5ba79209" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", ] [[package]] @@ -1719,6 +1859,15 @@ dependencies = [ "hmac", ] +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -1732,7 +1881,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 2.4.0", ] [[package]] @@ -1801,27 +1950,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] -name = "pkg-config" -version = "0.3.30" +name = "pkcs1" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] [[package]] -name = "polar-core" -version = "0.12.4" +name = "pkcs8" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d2b6ee5b5ff6312ca55e2ba75fbd438c72bc041c799055388d815726eca69b" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ - "js-sys", - "lalrpop 0.19.12", - "lalrpop-util 0.19.12", - "regex", - "serde", - "serde_derive", - "serde_json", - "wasm-bindgen", + "der", + "spki", ] +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + [[package]] name = "postgres" version = "0.19.8" @@ -1901,6 +2055,15 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -2040,17 +2203,8 @@ checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", + "regex-automata", + "regex-syntax", ] [[package]] @@ -2061,15 +2215,9 @@ checksum = 
"38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.4", + "regex-syntax", ] -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - [[package]] name = "regex-syntax" version = "0.8.4" @@ -2120,18 +2268,13 @@ dependencies = [ ] [[package]] -name = "ring" -version = "0.16.20" +name = "rfc6979" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", + "hmac", + "subtle", ] [[package]] @@ -2144,8 +2287,8 @@ dependencies = [ "cfg-if", "getrandom", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "spin", + "untrusted", "windows-sys 0.52.0", ] @@ -2177,12 +2320,32 @@ dependencies = [ "chrono", "log", "quick-xml", - "ring 0.17.8", + "ring", "serde", - "untrusted 0.9.0", + "untrusted", "uuid", ] +[[package]] +name = "rsa" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core", + "signature", + "spki", + "subtle", + "zeroize", +] + [[package]] name = "rtoolbox" version = "0.0.2" @@ -2229,7 +2392,7 @@ checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" dependencies = [ "log", "once_cell", - "ring 0.17.8", + "ring", "rustls-pki-types", "rustls-webpki", "subtle", @@ -2258,9 +2421,9 @@ version = "0.102.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" dependencies = [ - "ring 0.17.8", + "ring", "rustls-pki-types", - "untrusted 0.9.0", + "untrusted", ] [[package]] @@ -2328,6 +2491,20 @@ dependencies = [ "sha2", ] +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + [[package]] name = "secrecy" version = "0.8.0" @@ -2460,24 +2637,32 @@ dependencies = [ [[package]] name = "serde_with" -version = "1.14.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.4.0", "serde", + "serde_derive", + "serde_json", "serde_with_macros", + "time", ] [[package]] name = "serde_with_macros" -version = "1.5.2" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" dependencies = [ "darling", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.75", ] [[package]] @@ -2491,15 +2676,6 @@ dependencies = [ "digest", ] -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - [[package]] name = "shlex" version = "1.3.0" @@ -2515,6 +2691,16 @@ dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + [[package]] name = "siphasher" version = "0.3.11" @@ -2530,16 +2716,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "slug" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882a80f72ee45de3cc9a5afeb2da0331d58df69e4e7d8eeb5d3c7784ae67e724" -dependencies = [ - "deunicode", - "wasm-bindgen", -] - [[package]] name = "smallvec" version = "1.13.2" @@ -2558,15 +2734,19 @@ dependencies = [ [[package]] name = "spin" -version = "0.5.2" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] -name = "spin" -version = "0.9.8" +name = "spki" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] [[package]] name = "stderrlog" @@ -2605,12 +2785,6 @@ dependencies = [ "unicode-properties", ] -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "strsim" version = "0.11.1" @@ -2933,7 +3107,7 @@ version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ - "indexmap", + "indexmap 2.4.0", "serde", "serde_spanned", "toml_datetime", @@ -2973,23 +3147,10 @@ version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" 
dependencies = [ - "log", "pin-project-lite", - "tracing-attributes", "tracing-core", ] -[[package]] -name = "tracing-attributes" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.75", -] - [[package]] name = "tracing-core" version = "0.1.32" @@ -2997,50 +3158,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-serde" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" -dependencies = [ - "ansi_term", - "chrono", - "lazy_static", - "matchers", - "regex", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", - "tracing-serde", ] [[package]] @@ -3099,12 +3216,6 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "229730647fbc343e3a80e463c1db7f78f3855d3f3739bee0dda773c9a037c90a" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - [[package]] name 
= "untrusted" version = "0.9.0" @@ -3144,12 +3255,6 @@ dependencies = [ "getrandom", ] -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - [[package]] name = "vcpkg" version = "0.2.15" diff --git a/Cargo.toml b/Cargo.toml index 39d553877..f398222cd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,15 +36,13 @@ http-body-util = "0.1" hyper = { version = "1.3.1", features = ["server"] } hyper-util = { version = "0.1", features = [ "server" ] } intervaltree = "0.2.6" -jmespatch = { version = "0.3", features = ["sync"], optional = true } kmip = { version = "0.4.2", package = "kmip-protocol", features = [ "tls-with-openssl" ], optional = true } kvx = { version = "0.9.3", features = ["macros"] } libflate = "2.1.0" log = "0.4" once_cell = { version = "1.7.2", optional = true } -openidconnect = { version = "2.0.0", optional = true, default-features = false } +openidconnect = { version = "3.5.0", optional = true, default-features = false } openssl = { version = "0.10", features = ["v110"] } -oso = { version = "0.12", optional = true, default-features = false } percent-encoding = "2.3.1" pin-project-lite = "0.2.4" r2d2 = { version = "0.8.9", optional = true } @@ -81,9 +79,7 @@ default = ["multi-user", "hsm"] hsm = ["backoff", "kmip", "once_cell", "cryptoki", "r2d2"] multi-user = [ "basic-cookies", - "jmespatch/sync", "regex", - "oso", "openidconnect", "rpassword", "scrypt", diff --git a/defaults/abac.polar b/defaults/abac.polar deleted file mode 100644 index 5d283e265..000000000 --- a/defaults/abac.polar +++ /dev/null @@ -1,37 +0,0 @@ -################################################################################ -### Attribute Based Access Control (ABAC) -################################################################################ - -# Restricting access to CAs per user: -# =================================== -# As with 
defining roles per actor, how defining CAs per actor is done depends -# also in the same way on your krill.conf. - -# 1. Assigning CA access to users based on user attributes: -# ========================================================= -# See roles.polar for how the "role" attribute is used, but instead use -# "inc_cas" and "exc_cas" attributes. - -# 2. Assigning CA access through explicit rules that you define here for users -# defined in your krill.conf file: -# ======================================================================== -# You can also assign CA access directly by writing an actor_cannot_access_ca() -# rule per user as shown below: -# -# To deny access to one or more CAs for a specific user create a rule like so in -# THIS FILE: -# -# actor_cannot_access_ca(actor: Actor{name: "some@user.com"}, ca: Handle) if -# ca.name in ["some_ca_handle", "some_other_ca_handle"] and cut; -# -# To grant access ONLY to one or more CAs for a specific user, create rules -# like so in THIS FILE which first block access to all CAs for the user then -# grant access to specified CAs only for that user: -# -# actor_cannot_access_ca(actor: Actor{name: "some@user.com"}, _: Handle) if -# true; -# actor_can_access_ca(actor: Actor{name: "some@user.com"}, ca: Handle) if -# ca.name in ["some_ca_handle", "some_other_ca_handle"]; - -# actor_cannot_access_ca(actor: Actor{name: "admin-token"}, ca: Handle) if -# ca.name in ["ca2"] and cut; \ No newline at end of file diff --git a/defaults/aliases.polar b/defaults/aliases.polar deleted file mode 100644 index c1c2ee61a..000000000 --- a/defaults/aliases.polar +++ /dev/null @@ -1,13 +0,0 @@ -################################################################################ -### Role aliases -################################################################################ - -# Role names can be aliased so that they can be referred to, e.g. in actor -# attributes, via other names. 
For example the following aliases the "readonly" -# role to the name "Read Only" and does a quick sanity check to show that it -# works. -# -# role_allow("Read Only", action: Permission) if -# role_allow("readonly", action); -# -# ?= role_allow("Read Only", CA_LIST); diff --git a/defaults/krill-multi-user.conf b/defaults/krill-multi-user.conf index 7698d7b18..dc5c8c126 100644 --- a/defaults/krill-multi-user.conf +++ b/defaults/krill-multi-user.conf @@ -1,44 +1,50 @@ -###################################################################################### -# # -# ----==== WEB UI MULTI-USER LOGIN CONFIGURATION ====---- # -# # -# The settings below can be used to permit multiple users with configurable # -# access rights to login to the Krill web interface. # -# # -###################################################################################### +############################################################################## +# # +# ----==== WEB UI MULTI-USER LOGIN CONFIGURATION ====---- # +# # +# The settings below can be used to permit multiple users with configurable # +# access rights to login to the Krill web interface. # +# # +############################################################################## -# # Global auth(entication & authorization) settings # -# These control which auth provider in Krill will be used to authenticate users -# and settings common to all auth providers. See below for more details. +# These control which auth provider in Krill will be used to authenticate +# users and settings common to all auth providers. See below for more details. # # auth_type = "admin-token" -# auth_policies = ["...", ...] -# auth_private_attributes = ["...", ...] # Auth type (optional) # -# Which provider to use for authentication (AuthN), authorization (AuthZ) and -# identity (ID). Also affects which login form the Krill web UI displays, or -# (in the case of auth_type = "openid-connect") the user is redirected to. 
+# Which provider to use for authentication (AuthN), identity (ID), and +# authorization (AuthZ). Also affects which login form the Krill web UI +# displays, or (in the case of auth_type = "openid-connect") the user is +# redirected to. # # Supported values: "admin-token" (default), "config-file" or "openid-connect". # # At-a-glance comparison: # ======================= -# Setting Value AuthN AuthZ ID -# ---------------------------------------------------------------------------- -# "admin-token" admin_token role = "admin" id = "admin-token" -# ---------------------------------------------------------------------------- -# "openid-connect" provider provider provider -# checked supplied supplied -# ---------------------------------------------------------------------------- -# "config-file" values are taken from the [auth_users] section in this -# config file +# +# Setting Value AuthN ID AuthZ +# -------------------------------------------------------------------------- +# "admin-token" token matches "admin-token" special built-in +# admin_token role with full access +# config value +# -------------------------------------------------------------------------- +# "config-file" login username login username role name from user’s +# appears as key role field in +# in [auth_users] [auth_users] section +# section # ---------------------------------------------------------------------------- +# "openid-connect" provider provider role name provider +# checked supplied supplied +# +# The role names determined by the "openid-connect" and "config-file" types +# are looked up in the [auth_roles] section to determine access permissions. +# See "Auth roles" below. # # NOTE: At present the admin-token provider is used as a fallback provider # when using "openid-connect" or "config-file" as the primary provider. 
This is @@ -48,61 +54,17 @@ ### auth_type = "admin-token" -# Auth policies (optional) -# -# One or more paths to external authorization policy files to use in addition to -# those built-in to Krill. The files must be in Oso Polar format [*1] and are -# loaded after the built-in Krill policies. -# -# Custom authorization policies are intended to handle requirements that are too -# complex for just the settings available in krill.conf and is an advanced -# topic beyond the scope of this documentation. -# -# The built-in policies treat the following user attributes specially: -# -# - "role" - One of "admin", "readwrite" or "readonly". See the full Krill -# documentation for more information about which permissions are -# associated with each role. -# - "inc_cas" - A comma-separated set of CA handles which should be included -# in the set the user is permitted to see. If present this -# attribute will prevent the user seeing or interacting with any -# CA handle that is not in this set. -# - "exc_cas" - A comma-separated set of CA handles which should be excluded -# from the set the user is permitted to see. Overrides inc_cas. -# If inc_cas is not set, any CA handle NOT in exc_cas will be -# visible to the user who may interact with it according to -# the permissions granted to the user (e.g. through a role -# assignment). -# -# Note: The inc_cas and exc_cas settings only restrict visibility of and -# interaction with specified CAs via the Krill web UI. CA handles are still -# visible in the repository content and metrics output by Krill. -# -# References: -# *1 - https://docs.osohq.com/getting-started/policies/index.html -# -### auth_policies = ["...", ...] - - -# Auth private attributes (optional) -# -# Zero or more user attributes that should not be revealed by (or even sent to) -# the Krill web UI. For example, you may wish to hide "exc_cas" so that a user -# doesn't know which CAs they are prevented from seeing! -# -### auth_private_attributes = ["...", ...] 
- - # Config File auth provider details (mandatory when auth_type = "config-file") # -# The Config File auth provider allows you to define one or more users which can -# then be used to login to the Krill web UI. +# The Config File auth provider allows you to define one or more users which +# can then be used to login to the Krill web UI. # # Example: # auth_type = "config-file" # # [auth_users] -# "joe@example.com" = { attributes={ role="admin", exc_cas="ca1" }, password_hash="...", salt="..." } +# "joe@example.com" = { role="admin", password_hash="...", salt="..." } +# "jill@example.com" = { role="read-ca1", password_hash="...", salt="..." } # # Syntax: # auth_users = { "some id" = { ... } [, "another id" = { ... }, ...] } @@ -112,33 +74,31 @@ # "some id" = { ... } # "another id" = { ... } # -# Where { ... } can contain the following fields: +# +# The "some id" and "another id" terms indicate the email address or other +# identifier for the user. It will need to be entered in the username form +# field in the web UI when logging in. Krill also shows it in the event +# history as the actor to which the action is attributed. +# +# The { ... } above can contain the following fields: # # Field Mandatory? Notes -# ---------------------------------------------------------------------------- -# id Yes Email address or other identifier for the user. -# To be entered in the username form field in the -# web UI when logging in. Also shown in the Krill -# event history as the actor to which the action is -# attributed. -# -# password_hash Yes Generate these values using the 'krillc config user' -# salt Yes command on the command line. The web UI will hash -# the password entered in the login form and submit -# it to Krill for comparison to this hash, thereby -# ensuring that passwords are neither transmitted -# nor persisted. Per password salts prevents use of -# rainbow table attacks. 
Dual salting prevents use of -# stolen password hashes from the config file being -# used to login without knowing the passwords. -# -# attributes No Zero or more key=value pairs, e.g. role="admin". -# The built-in authorization policy (see above) -# requires a role attribute with value "admin", -# "readonly" or "readwrite". Attribute key=value -# pairs may be displayed by the Krill web UI. To -# prevent attributes being sent to the UI, use the -# auth_private_attributes setting (see above). +# -------------------------------------------------------------------------- +# +# password_hash Yes Generate these values using the +# 'krillc config user' command on the command +# salt Yes line. The web UI will hash the password entered +# in the login form and submit it to Krill for +# comparison to this hash, thereby ensuring that +# passwords are neither transmitted nor +# persisted. Per password salts prevents use of +# rainbow table attacks. Dual salting prevents +# use of stolen password hashes from the config +# file being used to login without knowing the +# passwords. +# +# role Yes The name of the role which determines the +# user’s access rights. See "Auth roles" below. # ### auth_type = "config-file" ### @@ -146,12 +106,13 @@ ### ... -# OpenID Connect auth provider details (mandatory when auth_type = "openid-connect") +# OpenID Connect auth provider details +# (mandatory when auth_type = "openid-connect") # # The OpenID Connect auth provider delegates authentication of users to an # external provider that implements the OpenID Connect Core 1.0 specification. -# It can also optionally retrieve user attributes (known as "claims" [*1]) from -# the provider, or from an [auth_users] section in the Krill configuration file. +# Krill uses user attributes (known as "claims" [*1]) from the provider to +# determine the user ID and role name for a user. # # Syntax: # auth_openidconnect = { issuer_url="...", client_id="...", client_secret="..." 
} @@ -167,23 +128,27 @@ # prompt_for_login = false # logout_url = "..." # -# [auth_openidconnect.claims] +# [[auth_openidconnect.id_claims]] # ... # -# Where { ... } can contain the following fields: +# [[auth_openidconnect.role_claims]] +# ... # -# (Sub)Field Mandatory? Notes -# ---------------------------------------------------------------------------- -# issuer_url Yes Provided by your OpenID Connect provider. This is -# the URL of the OpenID Connect provider discovery -# endpoint. "/.well-known/openid_configuration" -# will be appended if not present. Krill will fetch -# the OpenID Connect Discovery 1.0 compliant JSON -# response from this URL when Krill starts up. If -# this URL does not match the "issuer" value in the -# discovery endpoint response or if the discovery -# endpoint cannot be contacted, Krill will fail to -# start. +# Where [auth_openidconnect] can contain the following fields: +# +# Field Mandatory? Notes +# -------------------------------------------------------------------------- +# issuer_url Yes Provided by your OpenID Connect provider. This +# is the URL of the OpenID Connect provider +# discovery endpoint. +# "/.well-known/openid_configuration" +# will be appended if not present. Krill will +# fetch the OpenID Connect Discovery 1.0 +# compliant JSON response from this URL when +# Krill starts up. If this URL does not match the +# "issuer" value in the discovery endpoint +# response or if the discovery endpoint cannot be +# contacted, Krill will fail to start. # # client_id Yes Provided by your OpenID Connect provider. # @@ -192,38 +157,40 @@ # insecure No Defaults to false. Setting this to true will # disable verification of the signature of the # OpenID Connect provider token ID endpoint -# response. Setting this to false may allow attackers -# to modify responses from the provider without -# being detected. Setting this to false is strongly -# discouraged. +# response. 
Setting this to false may allow +# attackers to modify responses from the provider +# without being detected. Setting this to false +# is strongly discouraged. # # extra_login_scopes No Provider specific. Defaults to "". A # comma-separated list of OAuth 2.0 scopes to be -# passed to the provider when a user is directed to -# login with the provider. Scopes are typically -# used to instruct the provider to send additional -# user details along with provider token responses. -# One common scope is "profile" which often causes -# the server to respond with email addresses and -# other personal details about the user. If the -# OpenID Connect provider discovery endpoint shows -# that "email" is a supported scope then the "email" -# scope will be requested automatically, you don't -# need to specify it here in that case. -# -# extra_login_params No A { key=value, ... } map of additional HTTP query -# parameters to send with the authorization request -# to the provider when redirecting the user to the -# OpenID Connect provider login form. Section -# 3.1.2.1. Authentication Request in the OpenID -# Connect Core 1.0 specification [*2] lists various -# parameters that can be sent but the supported set -# varies by provider. The prompt=login parameter is -# automatically sent by the provider (though this -# behavior can be disabled, see prompt_for_login -# below) and thus does not need to be provided -# using this setting. Can also be specified as a -# separate TOML table, e.g.: +# passed to the provider when a user is directed +# to login with the provider. Scopes are +# typically used to instruct the provider to send +# additional user details along with provider +# token responses. One common scope is "profile" +# which often causes the server to respond with +# email addresses and other personal details +# about the user. 
If the OpenID Connect provider +# discovery endpoint shows that "email" is a +# supported scope then the "email" scope will be +# requested automatically, you don't need to +# specify it here in that case. +# +# extra_login_params No A { key=value, ... } map of additional HTTP +# query parameters to send with the authorization +# request to the provider when redirecting the +# user to the OpenID Connect provider login form. +# Section 3.1.2.1. Authentication Request in the +# OpenID Connect Core 1.0 specification [*2] +# lists various parameters that can be sent but +# the supported set varies by provider. The +# prompt=login parameter is automatically sent by +# the provider (though this behavior can be +# disabled, see prompt_for_login below) and thus +# does not need to be provided using this +# setting. Can also be specified as a separate +# TOML table, e.g.: # # [openid_connect.extra_login_params] # display=popup @@ -233,146 +200,160 @@ # disable the default behaviour of sending the # prompt=login parameter to the provider. This # also allows a different prompt= to be -# specified using extra_login_params, from the set -# defined in Section 3.1.2.1. Authentication +# specified using extra_login_params, from the +# set defined in Section 3.1.2.1. Authentication # Request in the OpenID Connect Core 1.0 # specification [*2]: "none", "login", "consent" # or "select_account". # -# logout_url No A URL to direct the browser to redirect the user -# to in order to logout. Ideally this is not needed -# as the provider OpenID Connect Discovery response -# should contain the details Krill needs, but for -# some providers a logout_url must be specified -# explicitly. If the provider discovery response -# doesn't announce support for any supported -# mechanisms and no logout_url value is set then -# Krill will default to directing the user back to -# the Krill UI index page from where the user will -# be directed to login again via the OpenID Connect -# provider. 
-# -# claims No A { ={...}, ... } map used to extract and -# +-- source No optionally transform claim values from the OpenID -# +-- jmespath Yes Connect provider responses [*3, *4]. Each claim -# +-- dest No specification results in zero or one additional -# attribute name=value pairs that can be shown -# in the Krill web UI and can be tested by the -# authorization policy.. Can also be specified as -# a separate TOML table, e.g.: -# -# [openid_connect.claims] -# name = { source="...", jmespath="...", dest="..."} -# name2 = { ... } -# -# An "id" claim is required. If not specified the -# following default "id" claim configuration will -# be used: -# -# id = { jmespath="email" } -# -# To prevent attributes being sent to the UI, use -# the auth_private_attributes setting (see above). -# -# source If the 'source' subfield is not provided, all -# available token and userinfo claim responses from -# the OpenID Connect provider will be searched for -# a field that matches the 'jmespath' expression. +# logout_url No A URL to direct the browser to redirect the +# user to in order to logout. Ideally this is not +# needed as the provider OpenID Connect Discovery +# response should contain the details Krill +# needs, but for some providers a logout_url must +# be specified explicitly. If the provider +# discovery response doesn't announce support for +# any supported mechanisms and no logout_url +# value is set then Krill will default to +# directing the user back to the Krill UI index +# page from where the user will be directed to +# login again via the OpenID Connect provider. +# +# id_claims No A list used to extract the user ID from the +# claim values in the OpenID Connect provider +# response. These will typically be given as +# separate TOML array tables. The fields are +# described in the following section. +# +# If this field is missing, the default rule +# is used which uses the value of the "email" +# claim as the user ID.
+# +# +# role_claims No A list used to extract the user role from the +# claim values in the OpenID Connect provider +# response. These will typically be given as +# separate TOML array tables. The fields are +# described in the following section. +# +# If this field is missing, the default rule +# is used which uses the value of the "role" +# claim as the user’s role. +# +# +# Each [[auth_openidconnect.id_claims]] and [[auth_openidconnect.role_claims]] +# occurrence describes one claim transformation rule. Each rule describes a +# test against the claim values contained in the OpenID Connect provider +# response [*3, *4]. If a test succeeds, the value is transformed and used as +# either the user ID or user role. For each of the two fields, only +# the first succeeding rule is considered. +# +# Field Mandatory? Notes +# -------------------------------------------------------------------------- +# +# source No If the 'source' subfield is not provided, all +# available token and userinfo claim responses +# from the OpenID Connect provider will be +# searched for a field that matches the 'claim' +# value. # # If specified the value identifies a specific # claim set to search and can be one of the # following values: # -# config-file # id-token-standard-claim # id-token-additional-claim # user-info-standard-claim # user-info-additional-claim # -# The source = "config-file" value is special, it -# doesn't refer to an OpenID Connect provider -# response claim set but rather to user attributes -# looked up using the "id" claim value as a key to -# index into the [auth_users] user attribute map. -# -# The "id" claim value cannot therefore itself be -# taken from [auth_users], and password_hash values -# in [auth_users] are ignored as authentication is -# handled by the OpenID Connect provider. -# -# dest The optional "dest" field can be used to set the -# value of an attribute by a different name than -# the claims key used.
This can be used to specify -# multiple claim rules that attempt to extract a -# a value for the same claim. The first matching -# rule in such cases will be used. -# -# jmespath The "jmespath" field specifies a JMESPath [*5] -# expression which is used to find a matching field -# in the OpenID Connect provider JSON response. In -# addition to the standard JMESPath functions the -# Krill implementation includes two custom regular -# expression based functions to match and -# optionally replace parts of the value of the -# fieldm matched by the JMESPath expression. These -# two functions are: -# -# recap(, 'capturing regex') -# resub(, 'search regex', replace')) -# -# With these extra functions cases where part of a -# complex string should be matched, extacted and -# (with resub) mapped to a value that matches what -# the authorization policy expects. E.g. it could -# be used to match a substring and then to "output" -# a particular Krill role name. -# -# If the combination of "resub()" and "dest" is -# not powerful enough you can take value matching -# even further using policy file rules. "dest" and -# "resub" may be combined with policy file rules in -# order to simplify the policy file rules needed. -# -# When determining the right "jmespath" expression -# to use, match failures will be logged at "info" -# level (as the auth policy in use may not require -# all configured claims to be found for all users) -# including a list of claims that are available to -# match. Additionally at "debug" level details -# about the claim search process are logged and at -# "trace" level the OpenID HTTP Connect provider -# HTTP/JSON responses are logged. -# -# Escaping: If you need to use double quotes to -# escape a JMESPath identifier you will need to use -# jmespath='...' or jmespath='''...''' instead of -# jmespath="..." in the Krill configuration file. -# See the JMESPath [*6] and TOML [*7] specs for -# more information about quoting and escaping. 
+# claim No The name of the field that is being looked at. +# If this field is missing, then the 'subst' +# field contains the value to be used for the +# user ID or role, independently of any claims. +# +# match No A regular expression that is applied to the +# value of the claim provided by the 'claim' +# field. +# +# A claim value matches if the regular expression +# matches. This could be a partial match, i.e., +# the expression "foo" matches "foo" but also +# "foobar" and "barfoobar". Enclose the text in +# a leading caret and trailing dollar sign for a +# full match, i.e., "^foo$" will only match "foo". +# +# If the expression matches, the claim value will +# be transformed using the expression given in +# the 'subst' field. +# +# Simple claim values are compared using their +# string representation. E.g. a boolean value is +# treated as having the string values "true" or +# "false" and numbers are similarly converted +# using standard JSON rules. +# +# For arrays, each element is matched and the +# first match is used. +# +# Objects never match. +# +# If the 'match' field is missing, any simple +# value matches and is used as is, i.e., the +# expression in the 'subst' field is ignored. +# For array claim values, the first element is +# used. +# +# subst No This field describes a transformation of a +# value matched via the 'match'. It can be a +# simple string or can contain references to +# substrings captured by the 'match' regular +# expression. +# +# All instances of "$ref" in the subst expression +# are replaced with the substring corresponding +# to the capture group identified by "ref". +# +# "ref" may be an integer corresponding to the +# index of the capture group (counted by order +# of opening parenthesis where 0 is the entire +# match) or it can be a name (consisting of +# letters, digits or underscores) corresponding +# to a named capture group.
+# If "ref" isn’t a valid capture group (whether +# the name doesn’t exist or isn’t a valid index), +# then it is replaced with the empty string. +# +# The longest possible name is used. For example, +# "$1a" looks up the capture group named "1a" and +# not the capture group at index 1. To exert more +# precise control over the name, use braces, +# e.g., "${1}a". +# +# To write a literal "$" use "$$". # # References: # *1: https://openid.net/specs/openid-connect-core-1_0.html#Claims # *2: https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest # *3: https://openid.net/specs/openid-connect-core-1_0.html#TokenResponse # *4: https://openid.net/specs/openid-connect-core-1_0.html#UserInfoResponse -# *5: https://jmespath.org/ -# *6: https://jmespath.org/specification.html#identifiers -# *7: https://toml.io/en/v1.0.0#string # -# ------------------------------------------------------------------------------ +# +# ---------------------------------------------------------------------------- # Registering Krill with an OpenID Connect provider: -# ------------------------------------------------------------------------------ +# ---------------------------------------------------------------------------- # In order to communicate with an OpenID Connect provider, Krill must first be -# registered with that provider. As a result of registration you will be issued -# a client_id and a client_secret, and possibly also an issuer_url (or you may -# have to consult the provider documentation to determine the issuer_url). +# registered with that provider. As a result of registration you will be +# issued a client_id and a client_secret, and possibly also an issuer_url (or +# you may have to consult the provider documentation to determine the +# issuer_url). # # When registering you will usually need to specify a callback URL. For Krill # this should be auth/callback (replace with the # actual value set above). 
# -# When auth_type = "openid-connect" the client details MUST be provided to Krill -# via settings in the [auth_openidconnect] section of the configuration file. +# When auth_type = "openid-connect" the client details MUST be provided to +# Krill via settings in the [auth_openidconnect] section of the configuration +# file. # # ------------------------------------------------------------------------------ # Required OpenID Connect provider capabilities: @@ -384,12 +365,12 @@ # https://openid.net/specs/openid-connect-discovery-1_0.html # https://openid.net/specs/openid-connect-rpinitiated-1_0.html # -# At the issuer_url endpoint the provider MUST announce support for at least the -# following: +# At the issuer_url endpoint the provider MUST announce support for at least +# the following: # # "issuer": ".." # "authorization_endpoint": "..", -# "token_endpoint": "..", ("userinfo_endpoint" is also supported if available) +# "token_endpoint": "..", ("userinfo_endpoint" is supported if available) # "jkws_uri": "..", # "scopes_supported": ["openid"] # "response_types_supported": ["code"] @@ -402,33 +383,33 @@ # A note about HTTPS certificates: # ------------------------------------------------------------------------------ # If the provider URLS are HTTPS URLs (which they should be unless this -# deployment of Krill is only for testing) then the HTTPS certificate must have -# been issued by a CA in the O/S CA certificate store, i.e. either a well known -# authority that is included in the store by default, or a custom CA that you -# have added to the store yourself. Krill will fail to connect to a provider -# that uses a self-signed certificate or a certificate from an unknown root -# certificate authority. For more information see for example: +# deployment of Krill is only for testing) then the HTTPS certificate must +# have been issued by a CA in the O/S CA certificate store, i.e. 
either a well +# known authority that is included in the store by default, or a custom CA +# that you have added to the store yourself. Krill will fail to connect to a +# provider that uses a self-signed certificate or a certificate from an +# unknown root certificate authority. For more information see for example: # http://manpages.ubuntu.com/manpages/xenial/man8/update-ca-certificates.8.html -# ------------------------------------------------------------------------------ +# ---------------------------------------------------------------------------- # # ------------------------------------------------------------------------------ # A note about end_session_endpoint and revocation_endpoint: # ------------------------------------------------------------------------------ # "end_session_endpoint" is defined by various [*1] OpenID Connect draft -# specifications relating to logout. In Krill it is used for the purpose defined -# in the OpenID Connect RP-Initiated Logout 1.0 spec [*1], namely for Krill as -# the RP (OpenID Connect terms Krill a Relying Party in this context, which is -# particularly confusing given that the term Relying Party also has meaning in -# Krill's native RPKI domain) to be able to initiate logout of the user at the -# provider. Krill also requires that the endpoint either honours the -# "post_logout_redirect_uri" HTTP query parameter (defined as OPTIONAL in the -# spec) or that the provider can be configured with corresponding behaviour, -# i.e. to redirect the end-user user-agent (browser) back to Krill after logout -# is completed at the provider. If support for this is lacking it is undefined -# where the user will end up after logout, which is not an issue if the user -# was finished with Krill, but is annoying if the logout was done in order to -# re-login to Krill as a different user. At least one provider has been observed -# which does NOT support this endpoint. +# specifications relating to logout. 
In Krill it is used for the purpose +# defined in the OpenID Connect RP-Initiated Logout 1.0 spec [*1], namely for +# Krill as the RP (OpenID Connect terms Krill a Relying Party in this context, +# which is particularly confusing given that the term Relying Party also has +# meaning in Krill's native RPKI domain) to be able to initiate logout of the +# user at the provider. Krill also requires that the endpoint either honours +# the "post_logout_redirect_uri" HTTP query parameter (defined as OPTIONAL in +# the spec) or that the provider can be configured with corresponding +# behaviour, i.e. to redirect the end-user user-agent (browser) back to Krill +# after logout is completed at the provider. If support for this is lacking it +# is undefined where the user will end up after logout, which is not an issue +# if the user was finished with Krill, but is annoying if the logout was done +# in order to re-login to Krill as a different user. At least one provider has +# been observed which does NOT support this endpoint. 
# # As an alternative Krill also supports "revocation_endpoint" # (see https://tools.ietf.org/html/rfc7009 "OAuth 2.0 Token Revocation") which @@ -462,8 +443,8 @@ # Example Azure Active Directory configuration: # ------------------------------------------------------------------------------ # This example is for a Microsoft Azure cloud Active Directory instance that -# permits only read-only and read-write access to users that login via the Krill -# web UI: +# permits only read-only and read-write access to users that login via the +# Krill web UI: # # [auth_openidconnect] # issuer_url = "https://login.microsoftonline.com/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/v2.0" @@ -471,21 +452,29 @@ # client_secret = "zzzzzzzz" # extra_login_scopes = ["offline_access"] # -# [auth_openidconnect.claims] -# id = { jmespath="name" } -# ro_role = { jmespath="resub(roles[?@ == 'gggggggg-gggg-gggg-gggg-gggggggggggg'] | [0], '^.+$', 'readonly')", dest="role" } -# rw_role = { jmespath="resub(roles[?@ == 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'] | [0], '^.+$', 'readwrite')", dest="role" } +# [[auth_openidconnect.id_claims]] +# claim = "name" # -# For this to work you must already have configured in the Azure portal your AD -# tenant, app registration and enterprise application settings (with redirect -# URI), users, group assignments and optional claim configuration (in the above -# example AD was configured to expose groups as roles). +# [[auth_openidconnect.role_claims]] +# claim = "role" +# match = "^gggggggg-gggg-gggg-gggg-gggggggggggg$" +# subst = "readonly" # -# The JMESPath expression matches on Azure AD group GUID values, taking the -# first match it finds and then setting the "role" attribute to either readonly -# or readwrite depending on which GUID was matched. The GUIDs for your groups -# will be different than those used in this example, see your Krill log for the -# GUIDs to match on. 
+# [[auth_openidconnect.role_claims]] +# claim = "role" +# match = "^hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh$" +# subst = "readwrite" +# +# For this to work you must already have configured in the Azure portal your +# AD tenant, app registration and enterprise application settings (with +# redirect URI), users, group assignments and optional claim configuration +# (in the above example AD was configured to expose groups as roles). +# +# The 'match' expression matches on Azure AD group GUID values, taking the +# first match it finds and then setting the "role" attribute to either +# "readonly" or "readwrite" depending on which GUID was matched. The GUIDs +# for your groups will be different than those used in this example, see your +# Krill log for the GUIDs to match on. # # The offline_access scope is required in order to trigger Azure Active # Directory to issue a refresh token to Krill. @@ -499,15 +488,18 @@ # client_secret = "zzzzzzzz" # logout_url = "https://dddddddd.auth.eu-central-1.amazoncognito.com/logout?client_id=yyyyyyyy&logout_uri=https://your.krill.domain/" # -# [auth_openidconnect.claims] -# role = { jmespath='''resub("cognito:groups"[?@ == 'KrillAdmins'] | [0], '^.+$', 'admin')''' } +# [[auth_openidconnect.role_claims]] +# claim = "cognito:groups" +# match = "^KrillAdmins$" +# subst = "admin" # # For this to work you must already have configured in the AWS Cognito console # a group called KrillAdmins and have added the logging in user to that group. # Otherwise the "cognito:groups" claim will not be present in the ID token -# response issued by AWS Cognito. You also need to have set a "Sign Out URL" for -# in your AWS Cognito "App client settings" which should match the value you -# use for the "logout_uri" query parameter in the logout_url Krill setting. +# response issued by AWS Cognito. 
You also need to have set a "Sign Out URL" +# for in your AWS Cognito "App client settings" which should match the value +# you use for the "logout_uri" query parameter in the logout_url Krill +# setting. # # logout_url needs to be set because AWS Cognito doesn't advertise support for # any of the OpenID Connect logout mechanisms that Krill understands. @@ -516,8 +508,6 @@ # specified in hte AWS Cognito "App integration" -> "Domain name" console # setting. The regions in the URLs should also match those that you are using. # -# Note the use of ''' which is needed because the Cognito groups claim contains -# a colon which is a reserved character in JMESPath identifiers. # # ------------------------------------------------------------------------------ # Example Google Cloud Platform configuration: @@ -528,29 +518,93 @@ # client_secret = "yyyyyyyy" # extra_login_scopes = ["profile"] # -# [auth_openidconnect.claims] -# role = { jmespath='''recap(resub(picture, '^.+photo\.jpg$', 'admin'), '(admin)')''' } +# [[auth_openidconnect.role_claims]] +# claim = "picture" +# match = "^.+photo\.jpg$" +# subst = "admin" # # For this to work you must already have created Credentials in the Google # developer console and have set the redirect URI to your Krill API # /auth/callback public URL. # -# In this example we have included the ".well-known/..." part of the issuer_url -# to demonstrate that Krill will accept the URL with or without it. -# -# ''' is used to ensure that characters in the regular expression don't conflict -# with JMESPath reserved characters. The JMESPath expression in this example is -# not a useful real world example as it grants "admin" rights to any Google -# account that has an associated picture whose URL ends in photo.jpg. -# -# The JMESPath expression in this example uses an outer recap() call to sanity -# check that the resulting role value is what we expect it to be. Without this -# a URL that doesn't match would pass straight through resub() unchanged. 
The -# recap() check is needed because you might use resub() to "clean up" values -# that in some cases don't need any cleaning and thus would still be wanted -# even though not modified. -# -# Also note that, while not visible in the configuration above, the GCP OpenID -# Connect provider advertizes an RFC 7009 OAuth 2.0 Token Revocation compatible -# `revocation_endpoint` which Krill will use to revoke the Google login token -# when the user logs out of Krill. \ No newline at end of file +# In this example we have included the ".well-known/..." part of the +# issuer_url to demonstrate that Krill will accept the URL with or without +# it. +# +# The match expression in this example is not a useful real world example as +# it grants "admin" rights to any Google account that has an associated +# picture whose URL ends in photo.jpg. +# +# Note that, while not visible in the configuration above, the GCP OpenID +# Connect provider advertizes an RFC 7009 OAuth 2.0 Token Revocation +# compatible `revocation_endpoint` which Krill will use to revoke the Google +# login token when the user logs out of Krill. + + +# Auth roles (optional) +# +# What an authenticated user has access to is configured through roles. Each +# role contains a set of permissions that are granted to any user having this +# role. Optionally, the role allows limiting the CAs that these permissions +# apply to. +# +# Roles are defined through the 'auth_roles' configuration value. +# +# Syntax: +# auth_roles = { "role name": { ... }, ... } +# +# Alternative syntax: +# [auth_roles] +# "role_name" = { ... } +# ... +# +# "role_name" is the name of the role referenced in either the config file +# provider’s user table or the OpenID Connect provider’s role attribute. +# +# The { ... } above can contain the following fields: +# +# Field Mandatory? Notes +# -------------------------------------------------------------------------- +# +# permissions Yes A list of permissions to be granted to the +# role.
The following permissions currently +# exist: +# +# login log into the Krill UI +# +# Access to the publication server: +# +# pub-admin, pub-list, pub-read, pub-create, +# pub-delete +# +# Access to CAs +# +# ca-list, ca-read, ca-create, ca-update, +# ca-admin, ca-delete +# +# Access to the ROAs of a CA +# +# routes-read, routes-update, routes-analysis +# +# Access to the ASPAs of a CA +# +# aspas-read, aspas-update, aspas-analysis +# +# Access to the router keys of a CA +# +# bgpsec-read, bgpsec-update +# +# cas No A list of CA handles that the role should +# grant access to. If this field is missing, +# access is granted to all CAs. +# +# If the [auth_roles] section is missing, three default roles will be +# used. These are: +# +# admin Allows full access to everything. +# readonly Allows list and read access to everything. +# readwrite Allows read, create, update, and delete access to everything. +# +### [auth_roles] +### ... + diff --git a/defaults/rbac.polar b/defaults/rbac.polar deleted file mode 100644 index fc13494bd..000000000 --- a/defaults/rbac.polar +++ /dev/null @@ -1,42 +0,0 @@ -################################################################################ -### Role Based Access Control (RBAC) -################################################################################ - -# 1. Assigning roles to users based on user attributes: -# ===================================================== -# Appropriately set the "role" attribute on your users, e.g. if set to "admin" -# for a user it would grant that user the "admin" role. The available roles can -# be seen in the roles.polar file. - -# 1a. With: "auth_type" = "config-file" -# ------------------------------------- -# You can assign roles like so in your _krill.conf_ file (NOT IN THIS FILE): -# (note: to generate the password hash see `krillc config user --help`). -# -# [auth_users] -# "some@user.com" = { attributes={ role="admin" }, password_hash="xxx" } - -# 1b. 
With: "auth_type" = "openid-connect" -# ---------------------------------------- -# You will need to define a "role" claim in your _krill.conf_ file (NOT IN THIS -# FILE) which identifies a field in the OpenID Connect service JSON ID Token or -# UserInfo responses that is set to a string value equal to the name of one of -# the roles defined in the roles.polar file, e.g.: -# -# [auth_openidconnect.claims] -# role = { jmespath = "some_role_field" } -# -# Your "jmespath" may need to be more complex than this, e.g. if you need -# to use only part of the claim value as the role string. - - -# 2. Assigning roles through explicit rules that you define here for users -# defined in your krill.conf file: -# ======================================================================== -# You can also assign roles directly by writing an actor_has_role() rule per user -# in THIS FILE, e.g. like this: -# -# actor_has_role(actor: Actor, role: "admin") if actor.name = "some@user.com"; -# -# Note: The "some@user.com" value MUST be a key under "[auth_users]" in your -# _krill.conf_ file. \ No newline at end of file diff --git a/defaults/roles.polar b/defaults/roles.polar deleted file mode 100644 index e591bfa18..000000000 --- a/defaults/roles.polar +++ /dev/null @@ -1,142 +0,0 @@ -################################################################################ -### Role mappings -################################################################################ - -# Note: mapping of roles to users is not defined here. -# -# Users that authenticate using .htpasswd credentials a role should be assigned -# to them in the mappings.polar or other .polar file using actor_has_role() (see -# below). - -# Users that authenticate with an OpenID Connect provider should have a role -# assigned to them via an attribute extracted from the OpenID Connect provider -# response, or via an explicit actor_has_role() assignment as mentioned above. 
- - -################################################################################ -### Role definitions -################################################################################ - -# All roles have the right to login: -# ---------------------------------- -# Actors with a role, any role, can login to the UI and are permitted to use the -# REST API. This is because roles are only assigned to actors if they were able -# to authenticate and a role mapping exists for them. Conversely, actors that -# are able to authenticate but for whom no role mapping exists, will not be -# permitted to login to the UI or to use the REST API. -# - -# If called with Option::None then some_role will be the Oso value nil. -# Otherwise some_role should be a string that we want to contain some value -# other than whitespace, so we check that it is non-empty after trimming any -# leading and/or trailing whitespace. -role_allow(some_role, action: Permission) if - not some_role = nil and - not some_role.trim().is_empty() and - action = LOGIN; - -### TEST: [ -# Actors with a role can login. -?= role_allow("some role", LOGIN); -# Conversely, actors without a role cannot do anything. 
-?= not role_allow(nil, LOGIN); -?= not role_allow("", LOGIN); -?= not role_allow(" ", LOGIN); -?= not role_allow(nil, nil); -?= not role_allow(nil, _); -### ] - - -# The admin role has the right to do anything with any resource: -# -------------------------------------------------------------- -role_allow("admin", _action: Permission); - -### TEST: [ -?= role_allow("admin", _); -?= not role_allow("admin", "take over the world"); -?= role_allow("admin", CA_CREATE); -### ] - - -# The readonly role has the following rights: -# ------------------------------------------- -role_allow("readonly", action: Permission) if - action in [ - CA_LIST, - CA_READ, - PUB_LIST, - PUB_READ, - ROUTES_READ, - ROUTES_ANALYSIS, - ASPAS_READ, - ASPAS_ANALYSIS, - BGPSEC_READ, - RTA_LIST, - RTA_READ - ]; - -### TEST: [ -?= role_allow("readonly", CA_LIST); -?= role_allow("readonly", CA_READ); -?= not role_allow("readonly", CA_CREATE); -?= not role_allow("readonly", CA_CREATE); -# etc -### ] - - -# The readwrite role has the following rights: -# -------------------------------------------- -role_allow("readwrite", action: Permission) if - action in [ - CA_LIST, - CA_READ, - CA_CREATE, - CA_UPDATE, - PUB_LIST, - PUB_READ, - PUB_CREATE, - PUB_DELETE, - ROUTES_READ, - ROUTES_ANALYSIS, - ROUTES_UPDATE, - ASPAS_READ, - ASPAS_UPDATE, - ASPAS_ANALYSIS, - BGPSEC_READ, - BGPSEC_UPDATE, - RTA_LIST, - RTA_READ, - RTA_UPDATE - ]; - -### TEST: [ -?= role_allow("readwrite", CA_LIST); -?= role_allow("readwrite", CA_READ); -?= role_allow("readwrite", CA_CREATE); -?= role_allow("readwrite", CA_CREATE); -# etc -### ] - - -# The testbed role has the following rights: -# ------------------------------------------ -# Note: The testbed role is a special case which is automatically assigned -# temporarily to anonymous users accessing the testbed UI/API. It should not be -# used outside of this file. 
-role_allow("testbed", action: Permission) if - action in [ - CA_READ, - CA_UPDATE, - PUB_READ, - PUB_CREATE, - PUB_DELETE, - PUB_ADMIN - ]; - -### TEST: [ -?= role_allow("testbed", CA_READ); -?= role_allow("testbed", CA_UPDATE); -?= role_allow("testbed", PUB_ADMIN); -?= not role_allow("testbed", ROUTES_UPDATE); -# etc -### ] \ No newline at end of file diff --git a/defaults/rules.polar b/defaults/rules.polar deleted file mode 100644 index eb8a2a3c1..000000000 --- a/defaults/rules.polar +++ /dev/null @@ -1,136 +0,0 @@ -################################################################################ -### Access rules -################################################################################ - - -# A dummy rule which can be "overridden" by a more specific match. -# Allows overriding rules that are hard to write a more specific rule for, -# especially because matching on a Permission variant is not considered more -# specific than on any variant of Permission due to this issue: -# https://github.com/osohq/oso/issues/801 -disallow(_, _, _) if false; - - -# note: using = or != with application types results in error: -# "comparison operators are unimplemented in the oso Rust library" -# so we don't compare nil to actor.attr() results to see if an attribute is set. - - -################################################################################ -### Check access to Krill REST APIs by requested action -################################################################################ -# The action belongs to a role and thus to have access the user must have the -# required role that includes the requested action. - -allow(actor: Actor, action: Permission, nil) if - not disallow(actor, action, _resource) and - actor_has_role(actor, role) and - role_allow(role, action); - -### TEST: [ -# Sanity check: verify that the built-in admin-token test actor can login.c -# Exercises the rules above. 
-?= allow(Actor.builtin("admin-token"), LOGIN, nil); -### ] - - -# Assign roles to users automatically if they have a "role" attribute: -# -------------------------------------------------------------------- -actor_has_role(actor: Actor, role) if role in actor.attr("role"); - - - -################################################################################ -### Check access to Krill CAs by requested action and requested CA handle -################################################################################ -# The action belongs to a role and thus to have access the user must have the -# required role that includes the requested action. Additionally the user must -# have explicit or implicit access to the specified CA handle, either because by -# default access isn't restricted per CA handle, or because the user is neither -# explicitly or implicitly denied access to the CA or is explicitly granted -# access to the CA. -allow(actor: Actor, action: Permission, ca: Handle) if - not disallow(actor, action, ca) and - actor_has_role(actor, role) and - role_allow(role, action) and - actor_can_access_ca(actor, ca); - -### TEST: [ -?= allow(Actor.builtin("admin-token"), CA_READ, _); -### ] - - -# Restrict access to CAs based on user "inc_cas" and "exc_cas" attributes: -# ------------------------------------------------------------------------ -# Attribute values are expected to be comma-separated value strings where each -# value is a CA handle. Excludes override includes. Excludes exclude one or more -# CA handles. Includes include one or more CA handles and consequently exclude -# (deny access to) all other CA handles. -# -# Define a rule that will fail to deny access for any actor for any CA handle. -# This is the default situation, i.e. all actors have access to all CAs. -actor_cannot_access_ca(_: Actor, _: Handle) if false; - -# Next define a rule that will succeed either if: -# 1. 
There is no rule that explicitly blocks access to the specified CA for -# the specified actor. -# 2a. The actor has no "inc_cas" or "exc_cas" attributes that grant or deny -# access to CAs, _OR_ -# 2ba. The actor has an "exc_cas" attribute which does NOT include the -# specified CA handle (i.e. the CA is not excluded from the set the -# actor has access), _AND_ -# 2bba. The actor does not have an "inc_cas" attribute (i.e. the actor is not -# restricted to certain CAs), _OR_ -# 2bbb. The actor has an "inc_cas" attribute which includes the specified CA -# handle (i.e. the CA is included in the set the actor is explicitly -# given access to). -actor_can_access_ca(actor: Actor, ca: Handle) if - # if an inline rule prevents access to the CA stop processing this rule - not actor_cannot_access_ca(actor, ca) and - - ( - # else, if neither include nor exclude attributes exist for this actor, - # allow access to the CA and stop processing this rule - (not _ in actor.attr("inc_cas") and not _ in actor.attr("exc_cas")) or - - # else, if the exclude attribute exists for this actor AND the given CA - # handle is NOT in the set of excluded CAs (which are defined as - # comma-separated CA handle values in a single string attribute) then do not - # exclude access yet, continue below, otherwise stop and deny access - (_ in actor.attr("exc_cas") and not ca.name in actor.attr("exc_cas").unwrap().split(",")) or - - # else, if the include attribute does not exist for this actor then allow - # access, otherwise only allow access if the given CA handle *IS* in the - # include set. 
- (_ in actor.attr("inc_cas") and ca.name in actor.attr("inc_cas").unwrap().split(",")) - ); - - -### TEST: [ -# test specific CA access restrictions defined inline using Polar rules -actor_cannot_access_ca(_actor: Actor{name: "dummy-test-actor2"}, ca: Handle) if - ca.name in ["dummy-test-ca2"] and cut; - -actor_cannot_access_ca(_actor: Actor{name: "dummy-test-actor3"}, ca: Handle) if - ca.name in ["dummy-test-ca3"] and cut; - -?= not actor_cannot_access_ca(new Actor("dummy-test-actor1", {}), new Handle("dummy-test-ca1")); -?= not actor_cannot_access_ca(new Actor("dummy-test-actor1", {}), new Handle("dummy-test-ca2")); -?= not actor_cannot_access_ca(new Actor("dummy-test-actor1", {}), new Handle("dummy-test-ca3")); - -?= not actor_cannot_access_ca(new Actor("dummy-test-actor2", {}), new Handle("dummy-test-ca1")); -?= actor_cannot_access_ca(new Actor("dummy-test-actor2", {}), new Handle("dummy-test-ca2")); -?= not actor_cannot_access_ca(new Actor("dummy-test-actor2", {}), new Handle("dummy-test-ca3")); - -?= not actor_cannot_access_ca(new Actor("dummy-test-actor3", {}), new Handle("dummy-test-ca1")); -?= not actor_cannot_access_ca(new Actor("dummy-test-actor3", {}), new Handle("dummy-test-ca2")); -?= actor_cannot_access_ca(new Actor("dummy-test-actor3", {}), new Handle("dummy-test-ca3")); - -# test CA access restrictions based on actor attribute values -?= actor_can_access_ca(new Actor("a", {}), new Handle("ca1")); -?= actor_can_access_ca(new Actor("a", {inc_cas: "ca1"}), new Handle("ca1")); -?= not actor_can_access_ca(new Actor("a", {inc_cas: "ca1"}), new Handle("ca2")); -?= not actor_can_access_ca(new Actor("a", {exc_cas: "ca1"}), new Handle("ca1")); -?= actor_can_access_ca(new Actor("a", {exc_cas: "ca1"}), new Handle("ca2")); - -### ] diff --git a/doc/manual/source/multi-user.rst b/doc/manual/source/multi-user.rst index bbf621d66..3bc4a73d2 100644 --- a/doc/manual/source/multi-user.rst +++ b/doc/manual/source/multi-user.rst @@ -12,17 +12,18 @@ Login with Named 
Users Checking the currently logged in user and user attributes -By default Krill requires users to authenticate using the configured secret token, -and actions in the event history are attributed to a client using the secret token or -to Krill itself. +By default Krill requires users to authenticate using the configured secret +token, and actions in the event history are attributed to a client using the +secret token or to Krill itself. -Krill also supports authenticating users **of the web user interface** with their -own username and credentials. Actions taken by such logged in users are attributed -in the event history to their username. +Krill also supports authenticating users **of the web user interface** with +their own username and credentials. Actions taken by such logged in users are +attributed in the event history to their username. To login users by username Krill must first be configured either with locally -defined user details and credentials, or with the details necessary to interact with -a separate `OpenID Connect `_ compliant identity provider system. +defined user details and credentials, or with the details necessary to +interact with a separate `OpenID Connect `_ +compliant identity provider system. Further reading: @@ -30,10 +31,9 @@ Further reading: :maxdepth: 1 :name: toc-multi-user - multi-user/authorization + multi-user/roles multi-user/config-file-provider multi-user/openid-connect-provider - multi-user/customization .. history .. authors @@ -41,5 +41,6 @@ Further reading: .. note:: Clients using the Krill REST API directly or via ``krillc`` cannot authenticate using named users, they can only authenticate using the - secret token. If you need this capability `please let us know `_. + secret token. If you need this capability `please let us know + `_. 
diff --git a/doc/manual/source/multi-user/authorization.rst b/doc/manual/source/multi-user/authorization.rst deleted file mode 100644 index e164772a0..000000000 --- a/doc/manual/source/multi-user/authorization.rst +++ /dev/null @@ -1,58 +0,0 @@ -.. _doc_krill_multi_user_access_control: - -Permissions, Roles & Attributes -=============================== - -.. versionadded:: v0.9.0 - -This page summarizes the different ways that Krill supports for restricting access -to *named users* that login to Krill. For backward compatibility, users that -authenticate with the secret token are given unrestricted access to Krill. - -Permissions ------------ - -Internally within Krill each REST API endpoint requires the logged in user to have -a specific Krill permission in order to execute the request. - - -User Attributes ---------------- - -User attributes are assigned by the identity provider, either in the -``krill.conf`` file for locally defined users, or in the management interface of -the OpenID Connect provider that manages your users. - -.. Warning:: By default, user attributes and their values are shown in the Krill - web user interface and the web user interface stores these - attributes in browser local storage. To prevent sensitive attributes - being revealed in the browser you can mark them as private. One - possible use for this is to restrict access using the ``exc_cas`` - attribute but not reveal the name of the restricted CA by doing - so. See ``auth_private_attributes`` in ``krill.conf`` file for more - information. - -Role Based Access Control -------------------------- - -At the highest level Krill can restrict access based on user roles. A role is a -named collection of internal Krill permissions. - -By default Krill supports three roles which can be assigned to users. A user can -only have one role at a time. A role is assigned to a user via the ``role`` -user attribute (see below for more on attributes). 
- -The default roles are: - -- ``admin`` : Grants users unrestricted access. -- ``readwrite``: Grants users the right to list, view and modify *existing* - CAs. -- ``readonly`` : Grants users the right to list and view CAs only. - -Attribute Based Access Control ------------------------------- - -Krill supports ``inc_cas`` and ``exc_cas`` user attributes which can be used -to permit or deny access to one or more Certificate Authorities in Krill. User -attributes can also be used to make decisions in :ref:`custom authorization policies `. - diff --git a/doc/manual/source/multi-user/config-file-provider.rst b/doc/manual/source/multi-user/config-file-provider.rst index c62e9ebd0..e61c96faf 100644 --- a/doc/manual/source/multi-user/config-file-provider.rst +++ b/doc/manual/source/multi-user/config-file-provider.rst @@ -12,9 +12,10 @@ Config File Users Introduction ------------ -By setting ``auth_type = "config-file"`` in ``krill.conf`` you can configure Krill -to require users to enter a username and password in the web user interface when -logging in, rather than the secret token that is usually required: +By setting ``auth_type = "config-file"`` in ``krill.conf`` you can configure +Krill to require users to enter a username and password in the web user +interface when logging in, rather than the secret token that is usually +required: .. figure:: img/config-file-login.png :align: center @@ -23,45 +24,47 @@ logging in, rather than the secret token that is usually required: Using config file user credentials to login to Krill -.. Note:: It is important to realize that Krill is not a complete user management - system and that Config File Users therefore have some :ref:`limitations `. +.. Note:: It is important to realize that Krill is not a complete user + management system and that Config File Users therefore have some + :ref:`limitations `. 
- While Config File Users are useful as a quick way to test named user - support in Krill and may suffice for simple situations, in larger more - critical settings you are strongly advised to consider using - :ref:`doc_krill_multi_user_openid_connect_provider` instead. + While Config File Users are useful as a quick way to test named + user support in Krill and may suffice for simple situations, in + larger more critical settings you are strongly advised to consider + using :ref:`doc_krill_multi_user_openid_connect_provider` instead. How does it work? ----------------- To add a user to the ``krill.conf`` file an administrator uses the ``krillc`` -command to compute a password *hash* for the user and then adds an entry to the -``[auth_users]`` section including their username, password *hash*, salt and any -:ref:`attributes ` that are relevant for that -user. +command to compute a password *hash* for the user and then adds an entry to +the ``[auth_users]`` section including their username, password *hash*, salt +and :ref:`role `. When a user enters their username and password into the web user interface a -hash of the password is computed and sent with the username to the Krill server. +hash of the password is computed and sent with the username to the Krill +server. The Krill server will verify that the user logging in provided a correct -password and has the ``LOGIN`` permission. On success Krill will respond with a -token which the web user interface should send on subsequent requests to -authenticate itself with Krill. The web user interface will keep a copy of this -token in browser local storage until the user logs out or is timed out due to -inactivity. - -.. tip:: The actual user password is **NEVER** stored on either the Krill server - nor the client browser and is **NEVER** sent by the client browser to - the Krill server. Only password *hashes* are stored and transmitted. - -.. warning:: Do **NOT** serve the Krill web user interface over unencrypted HTTP.
- While the password is never transmitted, the authentication token - that the user is subsequently issued is subject to interception - by malicious parties if sent unencrypted from the Krill server to - the web user interface. Note that this is equally true when using - any credential to authenticate with Krill, whether secret token - or password hash or when Krill is configured to interact with an - OpenID Connect provider. +password and has a role that grants the ``login`` permission. On success +Krill will respond with a token which the web user interface should send on +subsequent requests to authenticate itself with Krill. The web user interface +will keep a copy of this token in browser local storage until the user logs +out or is timed out due to inactivity. + +.. tip:: The actual user password is **NEVER** stored on either the Krill + server nor the client browser and is **NEVER** sent by the client + browser to the Krill server. Only password *hashes* are stored and + transmitted. + +.. warning:: Do **NOT** serve the Krill web user interface over unencrypted + HTTP. While the password is never transmitted, the + authentication token that the user is subsequently issued is + subject to interception by malicious parties if sent unencrypted + from the Krill server to the web user interface. Note that this + is equally true when using any credential to authenticate with + Krill, whether secret token or password hash or when Krill is + configured to interact with an OpenID Connect provider. .. _limitations: @@ -69,24 +72,24 @@ Known limitations ----------------- Config File Users are easy to define and give you complete control over who -has access to your Krill instance and what level of access is granted. However, -Krill is not a complete user management system and so there are some things to -remember when using Config File Users: +has access to your Krill instance and what level of access is granted. 
+However, Krill is not a complete user management system and so there are some +things to remember when using Config File Users: - Krill has no feature for requiring a user to change their password on first login. As such, by issuing users with passwords you become responsible for delivering the new password to them securely. -- OpenID Connect providers often have support for one-time passwords (OTP) - or other secondary lines of defence to protect an account than just a - username and password. Krill does not have this capability. +- OpenID Connect providers often have support for two-factor authentication + to protect an account better than just with a username and password. Krill + does not have this capability. - Krill has no feature for generating cryptographically strong passwords. You are responsible for choosing sufficiently strong passwords for your users. -- Usernames, password hashes and user attributes are sensitive information. By - adding them to your ``krill.conf`` file you become responsible for protecting - them. +- Usernames, password hashes and user attributes are sensitive information. + By adding them to your ``krill.conf`` file you become responsible for + protecting them. - If you lose your ``krill.conf`` file you will also lose the password hashes and will have to reset your users passwords unless you have a (**secure**) @@ -95,10 +98,10 @@ remember when using Config File Users: - If a user forgets their password you will need to issue them with a new one. Krill does not offer a forgotten password or password reset feature. -- Adding or changing users requires a restart of Krill. There is no support in - Krill at present for reloading the user details while Krill is running. - While Krill is restarting the web user interface will be unavailable for your - users. +- Adding or changing users requires a restart of Krill. There is no support + in Krill at present for reloading the user details while Krill is running. 
+ While Krill is restarting the web user interface will be unavailable for + your users. Setting it up ------------- @@ -108,7 +111,7 @@ The following steps are required to use Config File Users in your Krill setup. 1. Decide on the settings to be configured. """"""""""""""""""""""""""""""""""""""""""" -Decide which usernames you are going to configure, and what :ref:`role ` +Decide which usernames you are going to configure, and what :ref:`role ` and password they should have. For this example let's assume we want to configure the following users: @@ -143,9 +146,9 @@ to ``krill.conf``. The end result should look something like this: auth_type = "config-file" [auth_users] - "joe@example.com" = { attributes={ role="admin" }, password_hash="521e....0529", salt="d539....115e" } - "sally" = { attributes={ role="readonly" }, password_hash="...", salt="..." } - "dave_the_octopus" = { attributes={ role="readwrite" }, password_hash="...", salt="..." } + "joe@example.com" = { role="admin", password_hash="521e....0529", salt="d539....115e" } + "sally" = { role="readonly", password_hash="...", salt="..." } + "dave_the_octopus" = { role="readwrite", password_hash="...", salt="..." } ---- @@ -153,7 +156,8 @@ to ``krill.conf``. The end result should look something like this: """""" Restart Krill and deliver the chosen passwords to the respective users to -whom they belong. The users should now be able to login to your Krill instance. +whom they belong. The users should now be able to login to your Krill +instance. .. Warning:: Take whatever steps you think are necessary to ensure that the passwords are delivered **securely** to your users. @@ -162,30 +166,10 @@ Advanced configuration ---------------------- The information above gives you the basic structure for the configuration -file syntax needed to configure local users in Krill. 
- -See :ref:`doc_krill_multi_user_access_control` for information about -other user attributes and configuration settings that you might want to -use. - -See :ref:`doc_krill_multi_user_custom_policies` for information about -customizing the configuration even further. - -Below is a slightly modified version of the example above that also -uses the ``inc_cas``, ``exc_cas`` and ``auth_private_attributes`` features -and adds a user that has custom team attributes as well. Notice how the -team user does **NOT** have a ``role`` attribute! - -.. code-block:: bash - - auth_type = "config-file" - auth_private_attributes = [ "exc_cas" ] - - [auth_users] - "joe@example.com" = { attributes={ role="admin" }, password_hash="f45d...b25f", salt="..." } - "sally" = { attributes={ role="readonly", inc_cas="ca1,ca3" }, password_hash="...", salt="..." } - "dave_the_octopus" = { attributes={ role="readwrite" }, exc_cas="some_private_ca" }, password_hash="...", salt="..." } - "rob_from_team_one" = { attributes={ team="t1", teamrole="readwrite" }, password_hash="...", salt="..." } +file syntax needed to configure local users in Krill and uses the default +roles provided by Krill. See :ref:`doc_krill_multi_user_roles` for +information how to configure your own set of roles and limit what users +should have access to. Additional sources of information --------------------------------- diff --git a/doc/manual/source/multi-user/customization.rst b/doc/manual/source/multi-user/customization.rst deleted file mode 100644 index 0e7686ba9..000000000 --- a/doc/manual/source/multi-user/customization.rst +++ /dev/null @@ -1,218 +0,0 @@ -.. _doc_krill_multi_user_custom_policies: - -Custom Authorization Policies -============================= - -.. versionadded:: v0.9.0 - -.. contents:: - :local: - :depth: 2 - -Introduction ------------- - -.. note:: This is an advanced topic, you don't need this feature to - get started with Named Users. 
If you are considering - implementing a custom authorization policy `we'd love to hear from you `_! - -Custom authorization policies are a way of extending Krill by supplying -one or more files containing rules that will be added to those used by -Krill when deciding if a given action by a user should be permitted or -denied. - -Examples --------- - -Some examples showing the power of this can be seen in `doc/policies `_ -directory in the Krill source code repository. - -`role-per-ca-demo` -"""""""""""""""""" - -By default Krill lets you assign a role to a user that will be enforced -for all of the actions that they take irrespective of the CA being -worked with. The `role-per-ca-demo` example extends Krill so that a -user can be given different roles for different CAs. - -The demo also shows how to use new user attributes to influence -authorization decisions, in this case by looking for a user attribute -by the same name as the CA being worked with, and if found it uses the -attribute value as the role that the user should have when working with -that CA. - -Finally, the demo demonstrates how to add new roles to Krill by adding -two new roles that are more limited in power than the default roles in -Krill: - - - A `readonly`-like role that also has the right to update ROAs. - - A role that only permits a user to login and list CAs. - -`team-based-access-demo` -"""""""""""""""""""""""" - -The `team-based-access-demo` shows how one can define teams in the -policy: - - - Users can optionally belong to a team. - - Users can have a different role in the team than outside of it. - - Being a member of a team grants access to the CAs that the team - works with. - -The example works by defining the team names in the policy file. Each -team is given a name and a list of CAs it works with. 
Krill is then -extended to understand two new user attributes: - - - `team` - which team a user belongs to - - `teamrole` - which role the user has in the team - -Using custom policies ---------------------- - -To use a custom policies there must be an ``auth_policies`` setting -in ``krill.conf`` specifying the path to one ore more custom policy -files to load on startup. - -.. code-block:: none - - auth_type = "..." - auth_policies = [ "doc/policies/role-per-ca-demo.polar" ] - -.. warning:: Krill will fail to start if a custom authorization - policy file is syntactically invalid or if one of the - self-checks in the policy fails. - -.. warning:: Policy files should only be readable by Krill and - trusted operating system user accounts. - - Krill performs some basic sanity checks on startup to - verify that its authorization policies are working as - expected, but a malicious actor could make more subtle - changes to the policy logic which may go undetected, - like granting their own user elevated rights in Krill. - - If a malicious user is able to write to the policy - file they may however already be able to do much more - significant damage than editing a policy file! - -.. note:: Policy files are not reloaded if changed on disk while - Krill is running. - - For policies that only contain rules this is not a - problem as they would not be expected to change - very often, if ever. - - However, for policies that define configuration in the - policy file, such as the `team-based-access-demo`, - changes to the policy configuration will not take effect - until Krill is restarted. - -Writing custom policies ------------------------ - -Policies are written in the Polar language. 
The following articles -from the Oso website can help you get started with Polar: - - - `The Polar Language `_ - - `Write Oso Policies (30 min) `_ - - `Polar Syntax Reference `_ - - `Rust Types in Polar `_ - -The core policies and permissions that Krill uses are embedded into -Krill itself and cannot be changed. It is however possible to add -new roles and to add new logic based around the value of custom user -attributes. - -Defining new roles -"""""""""""""""""" - -Krill roles are defined by ``role_allow("rolename", action: Permission)`` -Polar rules. The rule is tested if the role of the current user is -"rolename". The current role definitions test if the requested -action is in a set defined to be valid for that role. - -.. tip:: You can see the built-in `role `_ - and `permission `_ - definitions in the Krill GitHub repository. - -To define a new role that grants read only rights plus the right to -update ROAs one could write the following Polar rule: - -.. code-block:: none - - role_allow("roawrite", action: Permission) - role_allow("readonly", action) or - action = ROUTES_UPDATE; - -This example is actually taken from the `role-per-ca-demo.polar` policy. - -Defining new rules -"""""""""""""""""" - -Let's write a rule that completely prevents the update of ROAs. - -When Oso does a permission check the search for a matching rule -starts by matching rules of the form ``allow(actor, action, resource)``. - -.. tip:: "resource" in this context is a Polar term and should not be - confused with the RPKI term "resource". - -The Krill policy delegates from its `allow` rules immediately to a -special ``disallow(actor, action, resource)`` rule. The only definition -of the ``disallow()`` rule in Krill by default says ``if false``, i.e. -nothing is disallowed. 
- -While technically you can prevent an action by ``cut`` -ing out of an -``allow()`` rule that is more specific than any other ``allow()`` rules, -it's not always possible to ensure that your rule is the most specific -match. That's where ``disallow()`` comes in handy. - -Let's use ``disallow()`` to implement our rule. - -Create a file called ``no_roa_updates.polar`` containing the following -content: - -.. code-block:: none - - # define our new rule: disallow all ROA updates - disallow(_, ROUTES_UPDATE, _); - - # we could also write this more explicitly like so: - # disallow(_, ROUTES_UPDATE, _) if true; - - # add a test to check that our new rule works by - # showing that an admin user can no longer update - # ROAs! - ?= not allow(new Actor("test", { role: "admin" }), ROUTES_UPDATE, new Handle("some_ca")); - -Let's break this down: - - - The ``_`` character is Polar syntax for "match any". - - Lines starting with ``#`` are comments. - - Lines starting with ``?=`` defines self-test inline queries that - will be executed when Krill starts. If a self-test inline query - fails Krill will exit with an error. - -The rule that we have created says that for any actor trying to update -a ROA on any "resource" (i.e. Certificate Authority), succeed (i.e. -disallow the attempt). - -If we now set ``auth_policies = [ "path/to/no_roa_updates.polar" ]`` -in our ``krill.conf`` file and restart Krill it will no longer be -possible for anyone to update ROAs. - -This is obviously not the most useful policy, but it demonstrates -the idea :-) - -Diagnosing issues -""""""""""""""""" - -If a rule doesn't work as expected a good way to investigate is to -add more self-test inline queries. - -If that fails you can set ``log_level = "debug"`` and set O/S -environment variable ``POLAR_LOG=1`` when runnng Krill. 
This will -cause a huge amount of internal Polar diagnostic logging which -will show exactly which rules Polar evaluated in which order with -which parameters and what the results were. - diff --git a/doc/manual/source/multi-user/openid-connect-provider.rst b/doc/manual/source/multi-user/openid-connect-provider.rst index ebfc72528..e82c29ed7 100644 --- a/doc/manual/source/multi-user/openid-connect-provider.rst +++ b/doc/manual/source/multi-user/openid-connect-provider.rst @@ -56,7 +56,8 @@ From the `OpenID Connect FAQ `_: users’ accounts for their own gain.* OpenID Connect takes the lessons learned from earlier identity protocols -and improves on them. It is `widely implemented `_ +and improves on them. It is +`widely implemented `_ and deployed, and for situations where the primary identity provider does not implement OpenID Connect there are OpenID Connect providers that can act as a bridge to systems that implement other identity protocols. @@ -89,8 +90,8 @@ these topics). The user experience """"""""""""""""""" -When an end user visits the Krill website in their browser they will be -redirected to the login page of the OpenID Connect provider. This is +When an end user visits the Krill user interface in their browser they will +be redirected to the login page of the OpenID Connect provider. This is **NOT** part of Krill. For example, when logging in to a Krill instance connected to the OpenID @@ -130,13 +131,15 @@ What the user doesn't see, except perhaps if their network connection is very slow, is that there are "hidden" intermediate steps occuring in the login flow, between the browser and Krill and between Krill and the OpenID Connect provider. These steps implement the OpenID Connect `"Authorizaton -Code Flow" `_. +Code Flow" +`_. 
If the user logged in correctly at the OpenID Connect provider login page and Krill was correctly registered with the provider and the provider was correctly setup for Krill, then Krill will receive a temporary Authorization -Code which it exchanges for an OAuth 2.0 `Access Token `_ -(and maybe also an OAuth 2.0 Refresh Token) and an OpenID Connect ID Token. +Code which it exchanges for an OAuth 2.0 `Access Token +`_ (and maybe also an +OAuth 2.0 Refresh Token) and an OpenID Connect ID Token. The ID Token includes so-called OAuth 2.0 **claims**, metadata about the user logging in. These claims are the key to whether or not Krill is able @@ -146,29 +149,41 @@ to login. Known limitations ----------------- -OpenID Connect Users avoid the problems with :ref:`Config File Users ` +OpenID Connect Users avoid the problems with :ref:`Config File Users +` but require more effort to setup and maintain: - Requires operating another service or using a 3rd party service. - Confguring Krill and the OpenID Connect provider is more involved than - setting up :ref:`Config File Users `. + setting up :ref:`Config File Users + `. - If Krill cannot contact the OpenID Connect provider, users will be unable to login to Krill with their OpenID Connect credentials. It will however still be possible to authenticate with Krill using its secret token. -.. warning:: If you encounter HTTP 502 Bad Gateway errors from your HTTP proxy - in front of Krill when logging in, or login loops where you are taken - back to the OpenID Connect provider login page but the Krill logs show - a successful login, you may need to increase the HTTP request and/or - response header buffer sizes used by your proxy. +.. 
warning:: If you encounter HTTP 502 Bad Gateway errors from your HTTP + proxy in front of Krill when logging in, or login loops where + you are taken back to the OpenID Connect provider login page but + the Krill logs show a successful login, you may need to increase + the HTTP request and/or response header buffer sizes used by + your proxy. - With NGINX this can be done by increasing settings such as `proxy_buffer_size `_, - `proxy_buffers `_, `large_client_header_buffers `_ (or `http2_max_field_size `_ and - `http2_max_header_size `_ - before NGINX v1.19.7). Thanks to GitHub user `racompton `_ for the ``large_client_header_buffers`` tip! - If using Kubernetes use the equivalent NGINX ingress controller ConfigMap - settings, e.g. `http2-max-field-size `_. Thanks to GitHub user `TheEnbyperor `_ for the HTTP/2 and Kubernetes tips! + With NGINX this can be done by increasing settings such as + `proxy_buffer_size `_, + `proxy_buffers `_, + `large_client_header_buffers `_ + (or `http2_max_field_size `_ + and `http2_max_header_size `_ + before NGINX v1.19.7). Thanks to GitHub user + `racompton `_ for the + ``large_client_header_buffers`` tip! + If using Kubernetes use the equivalent NGINX ingress controller + ConfigMap settings, e.g. + `http2-max-field-size `_. + Thanks to GitHub user + `TheEnbyperor `_ for the HTTP/2 + and Kubernetes tips! These issues occur because the size of the HTTP request & response headers on login to Krill when using OpenID Connect @@ -183,19 +198,30 @@ online services that you can create an account with. 
Any OpenID Connect provider that you choose must implement the following standards: -- `OpenID Connect Core 1.0 `_ -- `OpenID Connect Discovery 1.0 `_ -- `OpenID Connect RP-Initiated Logout 1.0 `_ *(optional)* -- `RFC 7009 OAuth 2.0 Token Revocation `_ *(optional)* - -Krill has been tested with the following OpenID Connect providers (in alphabetical order): - -- `Amazon Cognito `_ -- `Keycloak `_ -- `Microsoft Azure Active Directory `_ -- `Micro Focus NetIQ Access Manager 4.5 `_ - -.. warning:: Krill has been verified to be able to login and logout with `Google Cloud `_ +- `OpenID Connect Core 1.0 + `_ +- `OpenID Connect Discovery 1.0 + `_ +- `OpenID Connect RP-Initiated Logout 1.0 + `_ + *(optional)* +- `RFC 7009 OAuth 2.0 Token Revocation + `_ *(optional)* + +Krill has been tested with the following OpenID Connect providers (in +alphabetical order): + +- `Amazon Cognito + `_ +- `Keycloak + `_ +- `Microsoft Azure Active Directory + `_ +- `Micro Focus NetIQ Access Manager 4.5 + `_ + +.. warning:: Krill has been verified to be able to login and logout with + `Google Cloud `_ accounts. However, it is not advisable to grant access to Google accounts in general. Instead you should use a Google product that permits you to manage your own pool of @@ -246,7 +272,8 @@ steps must be taken: \ - - Is this property available by default as part of the `standard claims `_ + - Is this property available by default as part of the `standard claims + `_ sent by the provider to the client, or is it a provider specific claim or will it need to be configured in the provider as a custom claim? [1]_ @@ -257,10 +284,6 @@ steps must be taken: and `here `__), Amazon Cognito (`here `_) - - If no suitable claim values can be arranged with the provider, - consider using :ref:`hybrid mode ` instead. - - \ 2. **Gain access to the provider** @@ -294,7 +317,8 @@ steps must be taken: requests to other locations. .. 
[3] A correct URL will either end in /.well-known/openid-configuration - or should have that appended to it, e.g. the Google issuer URL is: https://accounts.google.com/.well-known/openid-configuration + or should have that appended to it, e.g. the Google issuer URL is: + https://accounts.google.com/.well-known/openid-configuration 4. **Create users, groups and/or claims in the provider** @@ -352,7 +376,8 @@ steps must be taken: Using Keycloak """""""""""""" -In this section you will see how to setup `Keycloak `__ +In this section you will see how to setup +`Keycloak `__ as an OpenID Connect provider for Krill. The following steps are required to use OpenID Connect Users in your Krill setup. @@ -374,9 +399,9 @@ sally sally@example.com wdGypnx5 readonly dave_the_octopus dave@example.com qnky8Zuj readwrite ================= ================= ========= ========= -And let's assume that we are going to use a local Docker `Keycloak `__ -container as our OpenID Connect provider which will be running at -https://localhost:8443/. +And let's assume that we are going to use a local Docker +`Keycloak `__ container as our OpenID Connect +provider which will be running at https://localhost:8443/. 
---- @@ -445,7 +470,7 @@ Create a realm =================== ====================================== Field Value =================== ====================================== - Name `krill` + Name ``krill`` =================== ====================================== Create a client application @@ -461,7 +486,7 @@ Continuing in the KeyCloak web UI with realm set to `krill`: =================== ====================================== Field Value =================== ====================================== - Client ID `krill` + Client ID ``krill`` =================== ====================================== - On the `Settings` tab that is shown next set the field values as @@ -471,7 +496,7 @@ Continuing in the KeyCloak web UI with realm set to `krill`: Field Value =================== ====================================== Access Type `confidential` [4]_ - Valid Redirect URIs `https://localhost:3000/*` [5]_ + Valid Redirect URIs ``https://localhost:3000/*`` [5]_ =================== ====================================== - Generate credentials for Krill to use: @@ -644,17 +669,36 @@ data. The resulting claims look something like this: Source: https://openid.net/specs/openid-connect-core-1_0.html#id_tokenExample -Thus if you were to configure Krill to use the "given_name" claim -as the ID of the user in Krill, like so: +Krill uses claims to determine two things: the user ID – which is both +shown in the UI and logged in the Krill audit logs –, and the +:ref:`roll ` which determines access +permissions. + +For each rules can be defined in their own section, +``[[auth_openidconnect.id_claims]]`` for the user ID and +``[[auth_openidconnect.role_claims]]`` for the role. + +For instance, if you want to configure Krill to use the "given_name" claim +as the ID of the user in Krill, you can do this like so: .. 
code-block:: none - [auth_openidconnect.claims] - id = { jmespath="given_name" } + [[auth_openidconnect.id_claims]] + claim = "given_name" -Then in this example Krill would use the value "Jane" as the ID of the +Given the example claims above, would use the value "Jane" as the ID of the user logged in to Krill. + +Default claims configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +By default, Krill uses the value of the “email” claim as the user ID and +the value the “role” for the role. Given that the example claims above don’t +contain a “role” claim, Krill would reject a login with the defaults since +it doesn’t know what role to use. + + Matching claims by name ~~~~~~~~~~~~~~~~~~~~~~~ @@ -667,26 +711,13 @@ This can be achieved using a config section that looks like this in .. code-block:: none - [auth_openidconnect.claims] - id = { jmespath="name" } + [[auth_openidconnect.id_claims]] + claim = "name" This tells Krill to search all of the claim data it receives for a field called `name` and use that as the ID for the user in Krill. This ID will also be logged in the Krill event history as the actor responsible for -any events that they caused.h - -What is JMESPath? According to `https://jmespath.org/ `_: - - *"JMESPath is a query language for JSON."* - -JSON is the format that OpenID Connect claim data is provided in by the -provider. JMESPath can therefore be used to tell Krill which particular -part from within the JSON it should use. - -This is a very trivial example of the power of JMESPath. You can find -out more about it at the `https://jmespath.org/ `_ -website and in ``krill.conf``. Krill comes with a couple of extensions -to JMESPath syntax which are also documented in ``krill.conf``. +any events that they caused. Matching claims by value ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -701,139 +732,73 @@ How do you tell Krill which users should have readonly access and which users should be have readwrite access? 
This is actually a real situation you can encounter with Azure Active -Directory. JMESPath can also be used to handle this scenario, albeit -with a much more complicated expression: +Directory. The rules in this case are a little more complicated: + .. code-block:: none - [auth_openidconnect.claims] - ro_role = { jmespath="resub(groups[?@ == 'gggggggg-gggg-gggg-gggg-gggggggggggg'] | [0], '^.+$', 'readonly')", dest="role" } - rw_role = { jmespath="resub(groups[?@ == 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'] | [0], '^.+$', 'readwrite')", dest="role" } - -Let's break the `ro_role` claim mapping rule down: - - - `gggg` and `hhhh` values represent the UUIDs of the groups to find in a - claim array called `groups`. - - The `resub` JMESPath function is a Krill extension to JMESPath that performs - regular expression based substitution. - - `groups[?@ == '...']` finds all entries in the `groups` array that match the - specified UUID. - - We then assume that there is only ever zero or one matches and just use the - first match `| [0]` found. - - Then we instruct Krill to take the entire value with `^.+$`. - - And to replace it with the value `readonly`. - - Finally, instead of assigning the value `readonly` to the user attribute - `ro_role`, `dest` is used to instead store `readonly` in a user attribute - called `role`. - -As `role` is the user attribute that the Krill authorization policy engine looks -at by default this will cause the user to be assigned the readonly role if their -user is a member of the group with the UUID value that represents the "readonly" -group! - -If we had only one rule we could write `role` on the left, but as we have two -rules that both try to provide a value for the same user attribute and the keys -on the left of the `=` must be unique, we use the `dest` trick to map any value -found to the `role` user attribute. 
+ [[auth_openidconnect.role_claims]] + claim = "groups" + match = "^gggggggg-gggg-gggg-gggg-gggggggggggg$" + subst = "readonly" + + [[auth_openidconnect.role_claims]] + claim = "groups" + match = "^hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh$" + subst = "readwrite" + +We define two rules for the role claims. These are processed in order and +the first matching rule is used. Let’s break them down: + + - The ``claim`` field is the name of the claim to look for. In both cases + we are looking at the ``"groups"`` claim. + - The ``match`` field contains a regular expression matching the UUIDs of + the groups. Because regular expressions happily match partially, we need + the hat and dollar symbols to force a match of a complete value. + - The ``subst`` field contains a value to substitute the match with. While + you can refer to match groups in the regular expression, we don’t need + this here and just want to replace the value with the names of the roles. + +The ``"groups"`` claim is an array with multiple groups. Each rule will go +over all the values in the array and try and match them. Only if that doesn’t +succeed is the next rule tried. Thus, if a user has both the “g” group and +the “h” group, the first rule will apply and the user will be assigned the +``"readonly"`` role. It is important to keep this ordering in mind when +writing the configuration. Matching claims by partial value ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Now imagine that the group membership is instead expressed not as array elements -that each exactly match some group name or UUID that we can look for, but that -each array element is a long string composed of `key=value` comma separated pairs. +Now imagine that the group membership is instead expressed not as array +elements that each exactly match some group name or UUID that we can look +for, but that each array element is a long string composed of `key=value` +comma separated pairs. 
-This can happen when the identity provider expresses group memberships in LDAP -X.500 format (see `RFC 2253 Lightweight Directory Access Protocol (v3): +This can happen when the identity provider expresses group memberships in +LDAP X.500 format (see `RFC 2253 Lightweight Directory Access Protocol (v3): UTF-8 String Representation of Distinguished Names `_). -For example you might see something like ``CN=Joe Bloggs,OU=NetworkTeam-Admins,DC=mycorp.com``, +For example you might see something like +``CN=Joe Bloggs,OU=NetworkTeam-Admins,DC=mycorp.com``, representing a user called Joe who is in the administrators group of the networking team of a company called mycorp.com. -Hopefully you'll only need simple rules but also equally hopefully if you need -more powerful matching Krill will be up to the task. For example, here's a more -complicated rule: - -.. code-block:: none - - dynamic_role = { jmespath="resub(memberof[?starts_with(@, 'CN=DL-Krill-')] | [0], '^CN=DL-Krill-(?P[^-,]+).+', '$role')" } - -This rule will match elements of an array called `memberof` whose value starts -with ``CN=DL-Krill-``, and wlll then extract just the part after that upto a -comma or dash, and will use that captured value as the Krill ``role`` user -attribute! - -.. _hybrid-mode: - -Matching claims to config values (aka 'hybrid' mode) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Usually when defining a claim mapping there is no need to define the source of -the claim. Krill will search all of the different OpenID Connect provider -claim sources that it supports (standard and additional claims in both the ID -Token and User Info responses) for a matching claim. - -However, if needed you can specify the claim source explicitly on a per claim -basis. Possible uses for this include: - - - Selecting the right claim when the same claim name exists in more than one - claim source but with different values. 
- - - Defining user attributes in the Krill configuration when the claim values - cannot be configured in the provider (perhaps due to lack of support by or - access to the provider). This is known as hybrid mode because it causes - Krill to use a hybrid of OpenID Connect provider for authentication and - config file defined user attributes for authorization. - -When defining a claim mapping we have so far seen ``jmespath`` and ``dest`` -settings, but there is also a ``source`` setting. The source can be set to one -of the following values: - - - ``config-file`` - - ``id-token-standard-claim`` - - ``id-token-additional-claim`` - - ``user-info-standard-claim`` - - ``user-info-additional-claim`` - -The first one is the really interesting one. The rest should hopefully never -be needed as by default Krill searches all of the possible OpenID Connect -provider claim sources that it supports. - -When using the ``config-file`` source there are two changes in the way that -Krill looks up the claim value: - - 1. The ``jmespath`` setting is not used. Instead an attribute with the - same name as the TOML key of the claim mapping is looked for on the - user. - - 2. The user attributes are taken from a config file entry with the ``id`` - of the current user is looked up in the ``[auth_users]`` config file - section. - -Note that the ``id`` of the current user is still determined by a normal -OpenID Connect claim lookup, i.e. by default the ``email`` value reported -by the provider for the user is used unless you define a claim mapping for -``id`` explicitly. - -For example, to identify users by the given name reported by the OpenID -Connect provider, and to set their role using entries in ``krill.conf`` -instead of basing the role on provider claim values, you could do something -like this: +Hopefully you'll only need simple rules but also equally hopefully if you +need more powerful matching Krill will be up to the task. For example, here's +a more complicated rule: .. 
code-block:: none - [auth_users] - "Joe Bloggs" = { attributes={ role="admin" } } - "Sally Alley" = { attributes={ role="readonly" } } + [[auth_openidconnect.role_claims]] + claim = "memberof" + match = "^CN=DL-Krill-(?P[^-,]+).+" + subst = "$role" - [auth_openidconnect.claims] - id = { jmespath="given_name" } - role = { source="config-file" } +This rule will match elements of an array called ``"memberof"`` whose value +starts with ``CN=DL-Krill-``, and wlll then extract just the part after that +upto a comma or dash, and will use that captured value as the role for the +user. -This will cause a user that logs in via the OpenID Connect provider who -has a ``given_name`` claim value of ``Joe Bloggs`` to be granted the -``admin`` role in Krill. Requesting missing claims ~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/manual/source/multi-user/roles.rst b/doc/manual/source/multi-user/roles.rst new file mode 100644 index 000000000..3a500aa08 --- /dev/null +++ b/doc/manual/source/multi-user/roles.rst @@ -0,0 +1,158 @@ +.. _doc_krill_multi_user_roles: + +Roles, Permissions and Resources +================================ + +.. versionadded:: v0.9.0 + +This page summarizes how Krill supports restricting access for *named users* +that login to Krill. For backward compatibility, users that authenticate with +the secret token are given unrestricted access to Krill. + +Roles +----- + +Rather than restricting access to individual users, Krill adds an +intermediary concept of roles. Each user is assigned a role and these roles +in turn define access restrictions. + +Roles can be defined in the config file through the ``[auth_roles]`` section. +Each role has a name, a set of permissions, and optionally a list of CAs +access is restricted to. + +By default, i.e., if you do not provide your own ``[auth_roles]`` in the +config file, Krill uses three roles: + +.. Glossary:: + + ``admin`` + Grants unrestricted access to all CAs. 
+ + ``readwrite`` + Grants the right to list, view and modify all *existing* CAs. + + ``readonly`` + Grants the right to list and view all CAs. + +If you do provide your own roles, these will *not* be present. + + +Permissions +----------- + +Internally within Krill each REST API endpoint requires the logged in user to +have a specific Krill permission in order to execute the request. When +defining your own roles, you can combine these permissions into a specific +set by listing those you wish to grant to the role. + +Currently, the following permissions are defined: + +.. Glossary:: + + ``login`` + required for logging into the Krill UI and for accessing any + resources, + + ``pub-admin`` + required for access to the built-in publication server, + + ``pub-list`` + required for listing the currently configured publishers of the + publication server, + + ``pub-read`` + required to show details of configured publishers of the + publication server, including the publication response to be returned + to a publisher, + + ``pub-create`` + required to add new publishers to the publication server, + + ``pub-delete`` + required to removed publishers from the publication server, + + ``ca-list`` + required to list existing CAs, + + ``ca-read`` + required to show details of existing CAs, + + ``ca-create`` + required to create new CAs, + + ``ca-update`` + required to update configuration of existing CAs as well as adding + and removing child CAs, + + ``ca-admin`` + required for administrative tasks related to all CAs as well as + importing CAs, also required for access to the trust anchor module, + + ``ca-delete`` + required to remove CAs, + + ``routes-read`` + required to show the ROAs configured for a CA, + + ``routes-update`` + required to update the ROAs configured for a CA, + + ``routes-analysis`` + required to perform BGP route analysis for a CA, + + ``aspas-read`` + required to show the ASPA records configured for a CA, + + ``aspas-update`` + required to update the ASPA 
records configured for a CA, + + ``bgpsec-read`` + required to show the BGPsec router keys configured for a CA, + + ``bgpsec-update`` + required to update the BGPsec router keys configured for a CA. + +In addition, there are two shortcuts that can be used to specify multiple +permission at once: + +.. Glossary:: + + ``any`` + grants all permissions, + + ``read`` + grants the ``ca-read``, ``routes-read``, ``aspas-read``, and + ``bgpsec-read`` permissions, + + ``update`` + grants the ``ca-update``, ``routes-update``, ``aspas-update``, and + ``bgpsec-update`` permissions, + + +Configuring Roles +----------------- + +When the default roles are not sufficient, you can create your own set of +roles in the Krill config file. You do so by creating a new block +``[auth_roles]`` which contains a list of all your roles. Each role needs +to have a mapping of one or two fields: + +* The mandatory field ``permissions`` provides a list of the permissions + to be granted by the role, and + +* the optional field ``cas`` is a list of the CAs that the role grants + access to. + +If the ``"cas"`` field is not present, access to all CAs is granted. + +As an example, here is the definition of the default roles plus a special +role that only allows read access to the ``"example"`` CA. + +.. 
code-block:: toml + + [auth_roles] + "admin" = { permissions = [ "any" ] } + "readwrite" = { permissions = [ "login", "pub-list", "pub-read", "pub-create", "pub-delete", "ca-list", "ca-create", "ca-delete", "read", "update" ] } + "readonly" = { permissions = [ "login", "pub-read", "ca-list", "read" ] } + "read-example" = { permissions = [ "login", "read" ], cas = [ "example" ] } + diff --git a/src/cli/ta/signer.rs b/src/cli/ta/signer.rs index 3aa224a74..3bc51e1ac 100644 --- a/src/cli/ta/signer.rs +++ b/src/cli/ta/signer.rs @@ -116,7 +116,7 @@ impl TrustAnchorSignerManager { .map_err(KrillError::AggregateStoreError)?; let ta_handle = TrustAnchorHandle::new("ta".into()); let signer = config.signer()?; - let actor = Actor::krillta(); + let actor = crate::constants::ACTOR_DEF_KRILLTA; Ok(TrustAnchorSignerManager { store, diff --git a/src/commons/actor.rs b/src/commons/actor.rs index eaf3207f2..4258dbe58 100644 --- a/src/commons/actor.rs +++ b/src/commons/actor.rs @@ -16,327 +16,91 @@ //! to define the Actor that should be created without needing any knowledge //! of the Authorizer. 
-#[cfg(feature = "multi-user")] -use oso::ToPolar; -#[cfg(feature = "multi-user")] -use std::fmt::Display; +use std::fmt; +use std::sync::Arc; -use std::{collections::HashMap, fmt, fmt::Debug}; -use crate::{ - commons::{ - error::{ApiAuthError, Error}, - KrillResult, - }, - constants::ACTOR_DEF_ANON, - daemon::auth::{policy::AuthPolicy, Auth}, -}; - -#[derive(Clone, Eq, PartialEq, Debug)] -pub enum ActorName { - AsStaticStr(&'static str), - AsString(String), -} - -impl ActorName { - pub fn as_str(&self) -> &str { - match &self { - ActorName::AsStaticStr(s) => s, - ActorName::AsString(s) => s, - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum Attributes { - None, - RoleOnly(&'static str), - UserDefined(HashMap), -} - -impl Attributes { - pub fn as_map(&self) -> HashMap { - match &self { - Attributes::UserDefined(map) => map.clone(), - Attributes::RoleOnly(role) => { - let mut map = HashMap::new(); - map.insert("role".to_string(), role.to_string()); - map - } - Attributes::None => HashMap::new(), - } - } -} +//------------ Actor --------------------------------------------------------- #[derive(Clone, Debug)] -pub struct ActorDef { - pub name: ActorName, - pub is_user: bool, - pub attributes: Attributes, - pub new_auth: Option, - pub auth_error: Option, -} +pub struct Actor(ActorName); -impl ActorDef { - pub const fn anonymous() -> ActorDef { - ActorDef { - name: ActorName::AsStaticStr("anonymous"), - is_user: false, - attributes: Attributes::None, - new_auth: None, - auth_error: None, - } - } - - pub const fn system(name: &'static str, role: &'static str) -> ActorDef { - ActorDef { - name: ActorName::AsStaticStr(name), - attributes: Attributes::RoleOnly(role), - is_user: false, - new_auth: None, - auth_error: None, - } - } - - pub fn user( - name: String, - attributes: HashMap, - new_auth: Option, - ) -> ActorDef { - ActorDef { - name: ActorName::AsString(name), - is_user: true, - attributes: Attributes::UserDefined(attributes), - new_auth, - 
auth_error: None, - } - } - - // Takes either a ApiAuthError or a commons::error::Error - pub fn with_auth_error(mut self, api_error: Error) -> Self { - self.auth_error = Some(api_error.into()); - self - } -} - -#[derive(Clone)] -pub struct Actor { - name: ActorName, - is_user: bool, - attributes: Attributes, - new_auth: Option, +#[derive(Clone, Debug)] +enum ActorName { + /// A system actor for the given component. + System(&'static str), - #[cfg_attr(not(feature = "multi-user"), allow(dead_code))] - policy: Option, + /// A user actor that has not been authenticated. + Anonymous, - #[cfg_attr(not(feature = "multi-user"), allow(dead_code))] - auth_error: Option, -} - -impl PartialEq for Actor { - fn eq(&self, other: &Self) -> bool { - self.name == other.name - && self.is_user == other.is_user - && self.attributes == other.attributes - } -} - -impl PartialEq for Actor { - fn eq(&self, other: &ActorDef) -> bool { - self.name == other.name - && self.is_user == other.is_user - && self.attributes == other.attributes - } + /// A user actor with the provided user ID. + User(Arc) } impl Actor { - /// Only for krillta - /// - /// No authorizer framework exists for krillta. It is designed as a - /// CLI. Sysadmins should ensure that only trusted people can execute - /// the CLI (and/or read / write its data). - pub fn krillta() -> Actor { - Self::actor_from_def(crate::constants::ACTOR_DEF_KRILLTA) + /// Creates a system actor for the given component. + pub const fn system(component: &'static str) -> Self { + Self(ActorName::System(component)) } - /// Setup a System Actor - /// - /// This is an admin user used by the system itself. Authorizer frameworks - /// are not relevant to it. - pub fn system_actor() -> Actor { - Self::actor_from_def(crate::constants::ACTOR_DEF_KRILL) + /// Creates the anonymous actor. + pub const fn anonymous() -> Self { + Self(ActorName::Anonymous) } - /// Should only be used for system users, i.e. not for mapping - /// logged in users. 
- pub fn actor_from_def(actor_def: ActorDef) -> Actor { - Actor { - name: actor_def.name.clone(), - is_user: actor_def.is_user, - attributes: actor_def.attributes, - new_auth: None, - auth_error: None, - policy: None, - } + /// Creates a user actor with the given user ID. + pub fn user(user_id: impl Into>) -> Self { + Self(ActorName::User(user_id.into())) } - /// Only for use in testing - pub fn test_from_details( - name: String, - attrs: HashMap, - ) -> Actor { - Actor { - name: ActorName::AsString(name), - attributes: Attributes::UserDefined(attrs), - is_user: false, - new_auth: None, - auth_error: None, - policy: None, - } - } - - pub fn new(actor_def: ActorDef, policy: AuthPolicy) -> Actor { - Actor { - name: actor_def.name.clone(), - is_user: actor_def.is_user, - attributes: actor_def.attributes.clone(), - new_auth: actor_def.new_auth.clone(), - auth_error: actor_def.auth_error, - policy: Some(policy), - } - } - - pub fn is_user(&self) -> bool { - self.is_user + /// Returns whether the actor is a system actor. + pub fn is_system(&self) -> bool { + matches!(self.0, ActorName::System(_)) } + /// Returns whether the actor is the anonymous actor. pub fn is_anonymous(&self) -> bool { - self == &ACTOR_DEF_ANON - } - - pub fn new_auth(&self) -> Option { - self.new_auth.clone() - } - - pub fn attributes(&self) -> HashMap { - self.attributes.as_map() + matches!(self.0, ActorName::Anonymous) } - pub fn attribute(&self, attr_name: String) -> Option { - match &self.attributes { - Attributes::UserDefined(map) => map.get(&attr_name).cloned(), - Attributes::RoleOnly(role) if &attr_name == "role" => { - Some(role.to_string()) - } - Attributes::RoleOnly(_) => None, - Attributes::None => None, - } + /// Returns whether the actor is a user actor. + pub fn is_user(&self) -> bool { + matches!(self.0, ActorName::User(_)) } + /// Returns the simple name of the actor. + /// + /// For system actors, this is the component name. 
For the anonymous + /// actor, this is the string `"anonymous"`. For user actors, it is their + /// user ID. pub fn name(&self) -> &str { - self.name.as_str() - } - - #[cfg(not(feature = "multi-user"))] - pub fn is_allowed(&self, _: A, _: R) -> KrillResult { - // When not in multi-user mode we only have two states: authenticated - // or not authenticated (aka anonymous). Only authenticated - // (i.e. not anonymous) actors are permitted to perform restricted - // actions, i.e. those for which this fn is invoked. - Ok(!self.is_anonymous()) - } - - #[cfg(feature = "multi-user")] - pub fn is_allowed( - &self, - action: A, - resource: R, - ) -> KrillResult - where - A: ToPolar + Display + Debug + Clone, - R: ToPolar + Display + Debug + Clone, - { - if log_enabled!(log::Level::Trace) { - trace!( - "Access check: actor={}, action={}, resource={}", - self.name(), - &action, - &resource - ); - } - - if let Some(api_error) = &self.auth_error { - trace!( - "Authentication denied: actor={}, action={}, resource={}: {}", - self.name(), - &action, - &resource, - &api_error - ); - return Err(Error::from(api_error.clone())); + match &self.0 { + ActorName::System(component) => component, + ActorName::Anonymous => "anonymous", + ActorName::User(user_id) => user_id.as_ref(), } + } - match &self.policy { - Some(policy) => match policy.is_allowed( - self.clone(), - action.clone(), - resource.clone(), - ) { - Ok(allowed) => { - if log_enabled!(log::Level::Trace) { - trace!( - "Access {}: actor={:?}, action={:?}, resource={:?}", - if allowed { "granted" } else { "denied" }, - self, - &action, - &resource - ); - } - Ok(allowed) - } - Err(err) => { - error!( - "Access denied: actor={}, action={}, resource={}: {}", - self.name(), - &action, - &resource, - err - ); - Ok(false) - } - }, - None => { - // Auth policy is required, can only be omitted for use by - // test rules inside an Oso policy. We should - // never get here, but we don't want to crash - // Krill by calling unreachable!(). 
- error!( - "Unable to check access: actor={}, action={}, resource={}: {}", - self.name(), - &action, - &resource, - "Internal error: missing policy" - ); - Ok(false) + /// Returns the audit name of the actor. + /// + /// This is the name stored with each command. For system actors, this + /// is the component name. For the anonymous actor, this is the string + /// `"anonymous"`. For user actors, it is the user ID prefixed with + /// `user:`. + pub fn audit_name(&self) -> String { + match self.0 { + ActorName::System(ref component) => component.to_string(), + ActorName::Anonymous => "anonymous".to_string(), + ActorName::User(ref user_id) => { + format!("user:{}", user_id.as_ref()) } } } } impl fmt::Display for Actor { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name()) + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(self.name()) } } -impl fmt::Debug for Actor { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "Actor(name={:?}, is_user={}, attr={:?})", - self.name(), - self.is_user, - self.attributes - ) - } -} diff --git a/src/commons/crypto/signing/dispatch/signerinfo.rs b/src/commons/crypto/signing/dispatch/signerinfo.rs index ef78744d8..a63ad003d 100644 --- a/src/commons/crypto/signing/dispatch/signerinfo.rs +++ b/src/commons/crypto/signing/dispatch/signerinfo.rs @@ -11,7 +11,6 @@ use url::Url; use crate::{ commons::{ - actor::Actor, api::CommandSummary, crypto::SignerHandle, error::Error, @@ -237,8 +236,7 @@ impl SignerInfoCommand { *key_id, internal_key_id.to_string(), ); - let actor = Actor::actor_from_def(ACTOR_DEF_KRILL); - Self::new(id, version, details, &actor) + Self::new(id, version, details, &ACTOR_DEF_KRILL) } pub fn remove_key( @@ -247,8 +245,7 @@ impl SignerInfoCommand { key_id: &KeyIdentifier, ) -> Self { let details = SignerInfoCommandDetails::RemoveKey(*key_id); - let actor = Actor::actor_from_def(ACTOR_DEF_KRILL); - Self::new(id, version, details, &actor) 
+ Self::new(id, version, details, &ACTOR_DEF_KRILL) } pub fn change_signer_name( @@ -259,8 +256,7 @@ impl SignerInfoCommand { let details = SignerInfoCommandDetails::ChangeSignerName( signer_name.to_string(), ); - let actor = Actor::actor_from_def(ACTOR_DEF_KRILL); - Self::new(id, version, details, &actor) + Self::new(id, version, details, &ACTOR_DEF_KRILL) } pub fn change_signer_info( @@ -271,8 +267,7 @@ impl SignerInfoCommand { let details = SignerInfoCommandDetails::ChangeSignerInfo( signer_info.to_string(), ); - let actor = Actor::actor_from_def(ACTOR_DEF_KRILL); - Self::new(id, version, details, &actor) + Self::new(id, version, details, &ACTOR_DEF_KRILL) } } @@ -494,7 +489,6 @@ impl SignerMapper { )) })?; - let actor = Actor::system_actor(); let cmd = SignerInfoInitCommand::new( &signer_handle, SignerInfoInitCommandDetails { @@ -504,7 +498,7 @@ impl SignerMapper { public_key: public_key.clone(), private_key_internal_id: private_key_internal_id.to_string(), }, - &actor, + &ACTOR_DEF_KRILL, ); self.store.add(cmd)?; diff --git a/src/commons/error.rs b/src/commons/error.rs index 8f0ecbe6a..901521e66 100644 --- a/src/commons/error.rs +++ b/src/commons/error.rs @@ -6,7 +6,9 @@ use hyper::StatusCode; use rpki::{ ca::{ - idexchange::{CaHandle, ChildHandle, ParentHandle, PublisherHandle}, + idexchange::{ + CaHandle, ChildHandle, MyHandle, ParentHandle, PublisherHandle + }, provisioning, provisioning::ResourceClassName, publication, @@ -18,6 +20,7 @@ use rpki::{ use crate::{ commons::{ + actor::Actor, api::{ rrdp::PublicationDeltaError, CustomerAsn, ErrorResponse, RoaPayload, @@ -27,6 +30,7 @@ use crate::{ util::httpclient, }, daemon::{ca::RoaPayloadJsonMapKey, http::tls_keys}, + daemon::auth::Permission, ta, upgrades::UpgradeError, }; @@ -127,6 +131,30 @@ pub enum ApiAuthError { ApiInsufficientRights(String), } +impl ApiAuthError { + pub fn insufficient_rights( + actor: &Actor, perm: Permission, resource: Option<&MyHandle> + ) -> Self { + Self::ApiInsufficientRights( + 
match resource { + Some(res) => { + format!( + "User '{}' does not have permission '{}' \ + on resource '{}'", + actor, perm, res, + ) + }, + None => { + format!( + "User '{}' does not have permission '{}'", + actor, perm, + ) + } + } + ) + } +} + impl Display for ApiAuthError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { diff --git a/src/commons/eventsourcing/cmd.rs b/src/commons/eventsourcing/cmd.rs index bd213a43c..efcd5eaaf 100644 --- a/src/commons/eventsourcing/cmd.rs +++ b/src/commons/eventsourcing/cmd.rs @@ -183,17 +183,11 @@ impl SentCommand { details: C, actor: &Actor, ) -> Self { - let actor_name = if actor.is_user() { - format!("user:{}", actor.name()) - } else { - actor.name().to_string() - }; - SentCommand { handle: id.clone(), version, details, - actor: actor_name, + actor: actor.audit_name(), } } diff --git a/src/commons/eventsourcing/mod.rs b/src/commons/eventsourcing/mod.rs index c3e6d2d3a..f606af8a2 100644 --- a/src/commons/eventsourcing/mod.rs +++ b/src/commons/eventsourcing/mod.rs @@ -41,7 +41,6 @@ mod tests { use crate::{ commons::{ - actor::Actor, api::{CommandHistoryCriteria, CommandSummary}, }, constants::ACTOR_DEF_TEST, @@ -74,11 +73,10 @@ mod tests { impl PersonInitCommand { fn make(id: &MyHandle, name: String) -> Self { - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); PersonInitCommand::new( id, PersonInitCommandDetails { name }, - &actor, + &ACTOR_DEF_TEST, ) } } @@ -194,12 +192,11 @@ mod tests { impl PersonCommand { pub fn go_around_sun(id: &MyHandle, version: Option) -> Self { - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); Self::new( id, version, PersonCommandDetails::GoAroundTheSun, - &actor, + &ACTOR_DEF_TEST, ) } @@ -209,8 +206,7 @@ mod tests { s: &str, ) -> Self { let details = PersonCommandDetails::ChangeName(s.to_string()); - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); - Self::new(id, version, details, &actor) + Self::new(id, version, details, &ACTOR_DEF_TEST) } } diff --git 
a/src/constants.rs b/src/constants.rs index 4b4f6ec71..b7f721c74 100644 --- a/src/constants.rs +++ b/src/constants.rs @@ -1,9 +1,6 @@ use kvx::Namespace; - -use crate::{ - commons::{actor::ActorDef, eventsourcing::namespace}, - daemon::auth::common::NoResourceType, -}; +use crate::commons::actor::Actor; +use crate::commons::eventsourcing::namespace; pub const KRILL_VERSION: &str = env!("CARGO_PKG_VERSION"); pub const KRILL_VERSION_MAJOR: &str = env!("CARGO_PKG_VERSION_MAJOR"); @@ -103,18 +100,14 @@ pub const HTTP_CLIENT_TIMEOUT_SECS: u64 = 120; pub const HTTP_USER_AGENT_TRUNCATE: usize = 256; // Will truncate received user-agent values at this size. pub const OPENID_CONNECT_HTTP_CLIENT_TIMEOUT_SECS: u64 = 30; -pub const NO_RESOURCE: NoResourceType = NoResourceType; - -pub const ACTOR_DEF_KRILL: ActorDef = ActorDef::system("krill", "admin"); -pub const ACTOR_DEF_KRILLTA: ActorDef = ActorDef::system("krillta", "admin"); -pub const ACTOR_DEF_ANON: ActorDef = ActorDef::anonymous(); -pub const ACTOR_DEF_ADMIN_TOKEN: ActorDef = - ActorDef::system("admin-token", "admin"); -pub const ACTOR_DEF_TESTBED: ActorDef = - ActorDef::system("testbed", "testbed"); +pub const ACTOR_DEF_KRILL: Actor = Actor::system("krill"); +pub const ACTOR_DEF_KRILLTA: Actor = Actor::system("krillta"); +pub const ACTOR_DEF_ANON: Actor = Actor::anonymous(); +pub const ACTOR_DEF_ADMIN_TOKEN: Actor = Actor::system("admin-token"); +pub const ACTOR_DEF_TESTBED: Actor = Actor::system("testbed"); #[cfg(test)] -pub const ACTOR_DEF_TEST: ActorDef = ActorDef::system("test", "admin"); +pub const ACTOR_DEF_TEST: Actor = Actor::system("test"); // Note: These must match the values used by Lagosta. #[cfg(feature = "multi-user")] diff --git a/src/daemon/auth/authorizer.rs b/src/daemon/auth/authorizer.rs index 976b0f42d..9f443de8d 100644 --- a/src/daemon/auth/authorizer.rs +++ b/src/daemon/auth/authorizer.rs @@ -1,38 +1,26 @@ //! 
Authorization for the API -use std::{any::Any, collections::HashMap, fmt, str::FromStr, sync::Arc}; - -use rpki::ca::idexchange::{InvalidHandle, MyHandle}; - -use crate::{ - commons::{ - actor::{Actor, ActorDef}, - api::Token, - error::Error, - KrillResult, - }, - constants::{ACTOR_DEF_ANON, NO_RESOURCE}, - daemon::{ - auth::{ - common::permissions::Permission, policy::AuthPolicy, - providers::AdminTokenAuthProvider, - }, - config::Config, - http::{HttpResponse, HyperRequest}, - }, -}; - +use std::sync::Arc; +use rpki::ca::idexchange::MyHandle; +use serde::Serialize; +use crate::commons::KrillResult; +use crate::commons::actor::Actor; +use crate::commons::api::Token; +use crate::commons::error::ApiAuthError; +use crate::daemon::config::{AuthType, Config}; +use crate::daemon::http::{HttpResponse, HyperRequest}; +use super::{Permission, Role}; +use super::providers::admin_token; #[cfg(feature = "multi-user")] -use crate::daemon::auth::providers::{ - ConfigFileAuthProvider, OpenIDConnectAuthProvider, -}; +use super::providers::{config_file, openid_connect}; -//------------ Authorizer ---------------------------------------------------- + +//------------ AuthProvider -------------------------------------------------- /// An AuthProvider authenticates and authorizes a given token. /// /// An AuthProvider is expected to configure itself using the global Krill -/// [`CONFIG`] object. This avoids propagation of potentially many provider +/// from configuration. This avoids propagation of potentially many provider /// specific configuration values from the calling code to the provider /// implementation. /// @@ -43,41 +31,53 @@ use crate::daemon::auth::providers::{ /// * discovery - as an interactive client where should I send my users /// to login and logout? /// * introspection - who is the currently "logged in" user? 
-pub enum AuthProvider { - Token(AdminTokenAuthProvider), +/// +/// This type is a wrapper around the available backend specific auth +/// providers that can be found in the [super::providers] module. +enum AuthProvider { + Token(admin_token::AuthProvider), #[cfg(feature = "multi-user")] - ConfigFile(ConfigFileAuthProvider), + ConfigFile(config_file::AuthProvider), #[cfg(feature = "multi-user")] - OpenIdConnect(OpenIDConnectAuthProvider), + OpenIdConnect(openid_connect::AuthProvider), } -impl From for AuthProvider { - fn from(provider: AdminTokenAuthProvider) -> Self { +impl From for AuthProvider { + fn from(provider: admin_token::AuthProvider) -> Self { AuthProvider::Token(provider) } } #[cfg(feature = "multi-user")] -impl From for AuthProvider { - fn from(provider: ConfigFileAuthProvider) -> Self { +impl From for AuthProvider { + fn from(provider: config_file::AuthProvider) -> Self { AuthProvider::ConfigFile(provider) } } #[cfg(feature = "multi-user")] -impl From for AuthProvider { - fn from(provider: OpenIDConnectAuthProvider) -> Self { +impl From for AuthProvider { + fn from(provider: openid_connect::AuthProvider) -> Self { AuthProvider::OpenIdConnect(provider) } } impl AuthProvider { + /// Authenticates a user from information included in an HTTP request. + /// + /// Returns `Ok(None)` to indicate that no authentication information + /// was present in the request and the request should thus be treated + /// as anonymous. + /// + /// If authentication succeeded, returns the auth info. If it failed, + /// it either returns an auth info created via [`AuthInfo::error`] or + /// just a plain error which the caller needs to convert. pub async fn authenticate( &self, request: &HyperRequest, - ) -> KrillResult> { + ) -> Result, ApiAuthError> { match &self { AuthProvider::Token(provider) => provider.authenticate(request), #[cfg(feature = "multi-user")] @@ -91,6 +91,7 @@ impl AuthProvider { } } + /// Returns an HTTP text response with the login URL.
pub async fn get_login_url(&self) -> KrillResult { match &self { AuthProvider::Token(provider) => provider.get_login_url(), @@ -103,6 +104,7 @@ impl AuthProvider { } } + /// Establishes a client session from credentials in an HTTP request. pub async fn login( &self, request: &HyperRequest, @@ -118,6 +120,7 @@ impl AuthProvider { } } + /// Returns an HTTP text response with the logout URL. pub async fn logout( &self, request: &HyperRequest, @@ -132,261 +135,319 @@ impl AuthProvider { } } } + + /// Sweeps out client session information. + /// + /// This method should be called regularly to remove expired sessions + /// from the cache. + pub fn sweep(&self) -> KrillResult<()> { + match self { + AuthProvider::Token(_) => Ok(()), + #[cfg(feature = "multi-user")] + AuthProvider::ConfigFile(provider) => provider.sweep(), + #[cfg(feature = "multi-user")] + AuthProvider::OpenIdConnect(provider) => provider.sweep(), + } + } + + /// Returns the size of the login session cache. + pub fn login_session_cache_size(&self) -> usize { + match self { + AuthProvider::Token(_) => 0, + #[cfg(feature = "multi-user")] + AuthProvider::ConfigFile(provider) => provider.cache_size(), + #[cfg(feature = "multi-user")] + AuthProvider::OpenIdConnect(provider) => provider.cache_size(), + } + } } -/// This type is responsible for checking authorizations when the API is -/// accessed. + +//------------ Authorizer ---------------------------------------------------- + +/// Checks authorizations when the API is accessed. pub struct Authorizer { + /// The auth provider configured by the user. primary_provider: AuthProvider, - legacy_provider: Option, - policy: AuthPolicy, - private_attributes: Vec, + + /// A fallback token auth provider when it isn’t the primary provider. + /// + /// This is necessary to support the command line client which only + /// supports admin token authentication. + legacy_provider: Option, } impl Authorizer { /// Creates an instance of the Authorizer. 
/// - /// The given [AuthProvider] will be used to verify API access requests, - /// to handle direct login attempts (if supported) and to determine - /// the URLs to pass on to clients (e.g. Lagosta) that want to know - /// where to direct end-users to login and logout. - /// - /// # Legacy support for krillc - /// - /// As krillc only supports [AdminTokenAuthProvider] based authentication, - /// if `P` an instance of some other provider, an instance of - /// [AdminTokenAuthProvider] will also be created. This will be used as a - /// fallback when Lagosta is configured to use some other [AuthProvider]. + /// The authorizer will be created according to information provided via + /// `config`. pub fn new( config: Arc, - primary_provider: AuthProvider, ) -> KrillResult { - let value_any = &primary_provider as &dyn Any; - let is_admin_token_provider = - value_any.downcast_ref::().is_some(); - - let legacy_provider = if is_admin_token_provider { - // the configured provider is the admin token provider so no - // admin token provider is needed for backward compatibility - None - } else { - // the configured provider is not the admin token provider so we - // also need an instance of the admin token provider in order to - // provider backward compatibility for krillc and other API - // clients that only understand the original, legacy, - // admin token based authentication. 
- Some(AdminTokenAuthProvider::new(config.clone()) + let (primary_provider, legacy_provider) = match config.auth_type { + AuthType::AdminToken => { + (admin_token::AuthProvider::new(config).into(), None) + } + #[cfg(feature = "multi-user")] + AuthType::ConfigFile => { + ( + config_file::AuthProvider::new(&config)?.into(), + Some(admin_token::AuthProvider::new(config)) + ) + } + #[cfg(feature = "multi-user")] + AuthType::OpenIDConnect => { + ( + openid_connect::AuthProvider::new(config.clone())?.into(), + Some(admin_token::AuthProvider::new(config)) + ) + } }; - #[cfg(feature = "multi-user")] - let private_attributes = config.auth_private_attributes.clone(); - #[cfg(not(feature = "multi-user"))] - let private_attributes = vec!["role".to_string()]; - Ok(Authorizer { primary_provider, legacy_provider, - policy: AuthPolicy::new(config)?, - private_attributes, }) } - pub async fn actor_from_request(&self, request: &HyperRequest) -> Actor { + /// Authenticates an HTTP request. + /// + /// The method will always return authentication information. + /// + /// If there was no authentication information in the request, the returned + /// auth info will indicate an anonymous user which will fail all + /// permission checks with “insufficient permissions.” + /// + /// If authentication failed, the returned auth info will also indicate + /// an anonymous user but it will fail permission checks with appropriate + /// error information. + pub async fn authenticate_request( + &self, request: &HyperRequest + ) -> AuthInfo { trace!("Determining actor for request {:?}", &request); - // Try the legacy provider first, if any.
+ let authenticate_res = match &self.legacy_provider { Some(provider) => provider.authenticate(request), None => Ok(None), }; // Try the real provider if we did not already successfully - // authenticate - authenticate_res = match authenticate_res { + // authenticate. This ignores any possible errors thrown by the + // legacy provider. + let authenticate_res = match authenticate_res { Ok(Some(res)) => Ok(Some(res)), _ => self.primary_provider.authenticate(request).await, }; // Create an actor based on the authentication result - let actor = match authenticate_res { + let res = match authenticate_res { // authentication success - Ok(Some(actor_def)) => self.actor_from_def(actor_def), + Ok(Some(res)) => res, // authentication failure - Ok(None) => self.actor_from_def(ACTOR_DEF_ANON), + Ok(None) => AuthInfo::anonymous(), // error during authentication - Err(err) => { - // receives a commons::error::Error, but we need an - // ApiAuthError - self.actor_from_def(ACTOR_DEF_ANON.with_auth_error(err)) - } + Err(err) => AuthInfo::error(err), }; - trace!("Actor determination result: {:?}", &actor); - - actor - } + trace!("Actor determination result: {:?}", res); - pub fn actor_from_def(&self, def: ActorDef) -> Actor { - Actor::new(def, self.policy.clone()) + res } - /// Return the URL at which an end-user should be directed to login with - /// the configured provider. + /// Returns an HTTP text response with the login URL. pub async fn get_login_url(&self) -> KrillResult { self.primary_provider.get_login_url().await } - /// Submit credentials directly to the configured provider to establish a - /// login session, if supported by the configured provider. + /// Establishes a client session from credentials in an HTTP request. 
pub async fn login( - &self, - request: &HyperRequest, + &self, request: &HyperRequest ) -> KrillResult { let user = self.primary_provider.login(request).await?; - // The user has passed authentication, but may still not be - // authorized to login as that requires a check against the policy - // which cannot be done by the AuthProvider. Check that now. - let actor_def = - ActorDef::user(user.id.clone(), user.attributes.clone(), None); - let actor = self.actor_from_def(actor_def); - if !actor.is_allowed(Permission::LOGIN, NO_RESOURCE)? { - let reason = format!("Login denied for user '{}': User is not permitted to 'LOGIN'", user.id); - warn!("{}", reason); - return Err(Error::ApiInsufficientRights(reason)); - } - - // Exclude private attributes before passing them to Lagosta to be - // shown in the web UI. - let visible_attributes = user - .attributes - .clone() - .into_iter() - .filter(|(k, _)| !self.private_attributes.contains(k)) - .collect::>(); - - let filtered_user = LoggedInUser { - token: user.token, - id: user.id, - attributes: visible_attributes, - }; - if log_enabled!(log::Level::Trace) { - trace!("User logged in: {:?}", &filtered_user); + trace!("User logged in: {:?}", &user); } else { - info!("User logged in: {}", &filtered_user.id); + info!("User logged in: {}, role: {}", user.id(), user.role()); } - Ok(filtered_user) + Ok(user) } - /// Return the URL at which an end-user should be directed to logout with - /// the configured provider. + /// Returns an HTTP text response with the logout URL. pub async fn logout( &self, request: &HyperRequest, ) -> KrillResult { self.primary_provider.logout(request).await } + + /// Sweeps out session information. + /// + /// This method should be called regularly to remove expired sessions + /// from the cache. + pub fn sweep(&self) -> KrillResult<()> { + self.primary_provider.sweep() + } + + /// Returns the size of the login session cache. 
+ pub fn login_session_cache_size(&self) -> usize { + self.primary_provider.login_session_cache_size() + } } + +//------------ LoggedInUser -------------------------------------------------- + +/// Information to be returned to the caller after login. +/// +/// This may be serialized into a JSON response. #[derive(Serialize, Debug)] pub struct LoggedInUser { - pub token: Token, - pub id: String, - pub attributes: HashMap, + /// The API token to use in subsequent calls. + token: Token, + + /// The user ID. + id: Arc, + + /// The user attributes. + /// + /// This used to be a hash map with values decided upon by the auth + /// provider but we now only and always have a role attribute. However, + /// in order to serialize into the JSON expected by the UI, this still + /// needs to be a struct. + attributes: LoggedInUserAttributes, } -#[derive(Clone, Debug)] -pub enum Auth { - Bearer(Token), - AuthorizationCode { - code: Token, - state: String, - nonce: String, - csrf_token_hash: String, - }, - UsernameAndPassword { - username: String, - password: String, - }, +#[derive(Serialize, Debug)] +pub struct LoggedInUserAttributes { + role: Arc, } -impl Auth { - pub fn bearer(token: Token) -> Self { - Auth::Bearer(token) - } - pub fn authorization_code( - code: Token, - state: String, - nonce: String, - csrf_token_hash: String, - ) -> Self { - Auth::AuthorizationCode { - code, - state, - nonce, - csrf_token_hash, +impl LoggedInUser { + pub fn new(token: Token, id: Arc, role: Arc) -> Self { + LoggedInUser { + token, + id, + attributes: LoggedInUserAttributes { role } } } - pub fn username_and_password_hash( - username: String, - password: String, - ) -> Self { - Auth::UsernameAndPassword { username, password } + pub fn token(&self) -> &Token { + &self.token } -} -//------------ Handle -------------------------------------------------------- + pub fn id(&self) -> &str { + &self.id + } -/// Handle for Authorization purposes. 
-// This type is a wrapper so the we can implement the PolarClass trait which -// is required when multi-user is enabled. We always need to pass the handle -// into the authorization macro, even if multi-user is not enabled. So we need -// this type even then. -#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq)] -pub struct Handle(MyHandle); + pub fn role(&self) -> &str { + self.attributes.role.as_ref() + } -impl fmt::Display for Handle { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) + pub fn attributes(&self) -> &impl Serialize { + &self.attributes } } -impl From<&MyHandle> for Handle { - fn from(h: &MyHandle) -> Self { - Handle(h.clone()) - } + +//------------ AuthInfo ------------------------------------------------------ + +/// Information about the result of trying to authenticate a request. +#[derive(Clone, Debug)] +pub struct AuthInfo { + /// The actor for the authenticated user. + actor: Actor, + + /// Optional updated bearer token. + new_token: Option, + + /// Access permissions. + /// + /// This is either a role which we consult to determine access + /// permissions or an authentication error to return instead. + permissions: Result, ApiAuthError>, } -impl FromStr for Handle { - type Err = InvalidHandle; +impl AuthInfo { + /// Creates auth info for the given user ID and role. + pub fn user( + user_id: impl Into>, + role: Arc, + ) -> Self { + Self { + actor: Actor::user(user_id), + new_token: None, + permissions: Ok(role), + } + } - fn from_str(s: &str) -> Result { - MyHandle::from_str(s).map(Handle) + /// Creates auth info for the testbed actor. + pub fn testbed() -> Self { + Self::user("testbed", Role::testbed().into()) } -} -impl AsRef for Handle { - fn as_ref(&self) -> &MyHandle { - &self.0 + /// Creates auth info for the anonymous actor. + /// + /// This actor fails all permission checks with insufficient permissions. 
+ fn anonymous() -> Self { + Self { + actor: Actor::anonymous(), + new_token: None, + permissions: Ok(Role::anonymous().into()), + } + } -#[cfg(feature = "multi-user")] -impl oso::PolarClass for Handle { - fn get_polar_class() -> oso::Class { - Self::get_polar_class_builder() - .set_constructor(|name: String| Handle::from_str(&name).unwrap()) - .set_equality_check(|left: &Handle, right: &Handle| left == right) - .add_attribute_getter("name", |instance| instance.to_string()) - .build() + /// Creates auth info for an authentication failure. + fn error(err: ApiAuthError) -> Self { + Self { + actor: Actor::anonymous(), + new_token: None, + permissions: Err(err) + } + } + + /// Sets the updated bearer token. + /// + /// If set, this new token needs to be included in an HTTP response. + pub fn set_new_token(&mut self, new_token: Token) { + self.new_token = Some(new_token); + } + + /// Takes out an updated bearer token if present + pub fn take_new_token(&mut self) -> Option { + self.new_token.take() + } - fn get_polar_class_builder() -> oso::ClassBuilder { - oso::Class::builder() + /// Returns a reference to the actor. + pub fn actor(&self) -> &Actor { + &self.actor + } + + /// Checks permissions for an operation. + /// + /// Returns an authentication error if either the request was not + /// authenticated or it was but the authenticated user does not have + /// sufficient permissions. + pub fn check_permission( + &self, + permission: Permission, + resource: Option<&MyHandle> + ) -> Result<(), ApiAuthError> { + if self.permissions.as_ref().map_err(Clone::clone)?
+ .is_allowed(permission, resource) + { + Ok(()) + } + else { + Err(ApiAuthError::insufficient_rights( + &self.actor, permission, resource + )) + } } } + diff --git a/src/daemon/auth/common/mod.rs b/src/daemon/auth/common/mod.rs deleted file mode 100644 index ff8a7f68c..000000000 --- a/src/daemon/auth/common/mod.rs +++ /dev/null @@ -1,15 +0,0 @@ -#[cfg(feature = "multi-user")] -pub mod crypt; - -pub mod permissions; - -#[derive(Debug, Clone)] -pub struct NoResourceType; -impl std::fmt::Display for NoResourceType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "") - } -} - -#[cfg(feature = "multi-user")] -pub mod session; diff --git a/src/daemon/auth/common/permissions.rs b/src/daemon/auth/common/permissions.rs deleted file mode 100644 index 7c5a4224c..000000000 --- a/src/daemon/auth/common/permissions.rs +++ /dev/null @@ -1,81 +0,0 @@ -// Based on https://github.com/rust-lang/rfcs/issues/284#issuecomment-277871931 -// Use a macro to build the Permission enum so that we can iterate over the -// enum variants when adding them as Polar constants in struct AuthPolicy. -// This ensures that we don't accidentally miss one. We can also implement the -// Display trait that we need Actor::is_allowed() and the FromStr trait and -// avoid labour intensive and error prone duplication of the enum variants -// that would be needed when implementing the traits manually. -macro_rules! 
iterable_enum { - ($name:ident { $($variant:ident),* }) => ( - #[allow(non_camel_case_types)] - #[derive(Clone, Debug, Eq, PartialEq)] - pub enum $name { $($variant),* } - - impl std::fmt::Display for $name { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - $( Self::$variant => write!(f, stringify!($variant)) ),+ - } - } - } - - impl std::str::FromStr for $name { - type Err = String; - - fn from_str(input: &str) -> Result { - match input { - $( stringify!($variant) => { Ok($name::$variant) } - ),+ - _ => Err(format!("Unknown {} '{}'", stringify!($name), input)) - } - } - } - - impl $name { - pub fn iter() -> Iter { - Iter(None) - } - } - - pub struct Iter(Option<$name>); - - impl Iterator for Iter { - type Item = $name; - - fn next(&mut self) -> Option { - match self.0 { - None => $( { self.0 = Some($name::$variant); Some($name::$variant) }, - Some($name::$variant) => )* None, - } - } - } - ); -} - -iterable_enum! { - Permission { - LOGIN, - PUB_ADMIN, - PUB_LIST, - PUB_READ, - PUB_CREATE, - PUB_DELETE, - CA_LIST, - CA_READ, - CA_CREATE, - CA_UPDATE, - CA_ADMIN, - CA_DELETE, - ROUTES_READ, - ROUTES_UPDATE, - ROUTES_ANALYSIS, - ASPAS_READ, - ASPAS_UPDATE, - ASPAS_ANALYSIS, - BGPSEC_READ, - BGPSEC_UPDATE, - RTA_LIST, - RTA_READ, - RTA_UPDATE - } -} diff --git a/src/daemon/auth/common/crypt.rs b/src/daemon/auth/crypt.rs similarity index 89% rename from src/daemon/auth/common/crypt.rs rename to src/daemon/auth/crypt.rs index b3bd7a0e5..3785fdba2 100644 --- a/src/daemon/auth/common/crypt.rs +++ b/src/daemon/auth/crypt.rs @@ -22,13 +22,11 @@ // 4: https://github.com/NLnetLabs/krill/issues/382 use std::sync::atomic::{AtomicU64, Ordering}; - use kvx::{namespace, segment, Key, Namespace, Segment}; - -use crate::{ - commons::{error::Error, util::ext_serde, KrillResult}, - daemon::config::Config, -}; +use crate::commons::KrillResult; +use crate::commons::error::{ApiAuthError, Error}; +use crate::commons::util::ext_serde; +use 
crate::daemon::config::Config; const CHACHA20_KEY_BIT_LEN: usize = 256; const CHACHA20_KEY_BYTE_LEN: usize = CHACHA20_KEY_BIT_LEN / 8; @@ -100,17 +98,6 @@ impl CryptState { nonce: NonceState::new()?, }) } - - pub fn from_key_vec(key_vec: Vec) -> KrillResult { - let boxed_array: Box<[u8; CHACHA20_KEY_BYTE_LEN]> = - key_vec.into_boxed_slice().try_into().map_err(|_| { - Error::custom( - "Unable to process session encryption key".to_string(), - ) - })?; - - Self::from_key_bytes(*boxed_array) - } } // Returns nonce + tag + cipher text, or an error. @@ -145,11 +132,13 @@ pub(crate) fn encrypt( // `payload` should be of the form nonce + tag + cipher text. // Returns the plain text resulting from decryption, or an error. -pub(crate) fn decrypt(key: &[u8], payload: &[u8]) -> KrillResult> { +pub(crate) fn decrypt( + key: &[u8], payload: &[u8] +) -> Result, ApiAuthError> { // TODO: Do we need to get the cipher each time or could we do this just // once? if payload.len() <= CLEARTEXT_PREFIX_LEN { - return Err(Error::Custom( + return Err(ApiAuthError::ApiInvalidCredentials( "Decryption error: Insufficient data".to_string(), )); } @@ -167,7 +156,11 @@ pub(crate) fn decrypt(key: &[u8], payload: &[u8]) -> KrillResult> { cipher_text, tag, ) - .map_err(|err| Error::Custom(format!("Decryption error: {}", &err))) + .map_err(|err| { + ApiAuthError::ApiInvalidCredentials( + format!("Decryption error: {}", &err) + ) + }) } pub(crate) fn crypt_init(config: &Config) -> KrillResult { diff --git a/src/daemon/auth/mod.rs b/src/daemon/auth/mod.rs index c7efec63a..40316795c 100644 --- a/src/daemon/auth/mod.rs +++ b/src/daemon/auth/mod.rs @@ -1,23 +1,14 @@ -pub mod authorizer; -pub mod providers; -pub mod common; -#[cfg(feature = "multi-user")] -pub mod policy; -#[cfg(not(feature = "multi-user"))] -pub mod policy { - use std::sync::Arc; +pub use self::authorizer::{AuthInfo, Authorizer, LoggedInUser}; +pub use self::permission::{Permission, PermissionSet}; +pub use self::roles::{Role, RoleMap}; - 
use crate::{commons::KrillResult, daemon::config::Config}; +pub mod providers; - #[derive(Clone)] - pub struct AuthPolicy {} - impl AuthPolicy { - pub fn new(_: Arc) -> KrillResult { - Ok(AuthPolicy {}) - } - } -} +mod authorizer; +#[cfg(feature = "multi-user")] mod crypt; +mod permission; +mod roles; +#[cfg(feature = "multi-user")] mod session; -pub use authorizer::{Auth, AuthProvider, Authorizer, Handle, LoggedInUser}; diff --git a/src/daemon/auth/permission.rs b/src/daemon/auth/permission.rs new file mode 100644 index 000000000..0140ff745 --- /dev/null +++ b/src/daemon/auth/permission.rs @@ -0,0 +1,257 @@ +//! Permissions and permission sets. +//! +//! This is a private module. Its public items are re-exported by the parent. + +use std::{fmt, str}; +use std::str::FromStr; +use serde::{Deserialize, Serialize}; + + +//------------ Permission ---------------------------------------------------- + +macro_rules! define_permission { + ( $( ($variant:ident, $text:expr), )* ) => { + /// The set of available permissions. + /// + /// Each API request requires for the actor to have exactly one of these + /// permissions. + #[derive(Clone, Copy, Debug, Deserialize, Serialize)] + #[repr(u32)] + pub enum Permission { + $( + #[serde(rename = $text)] + $variant, + )* + } + + impl Permission { + pub fn iter() -> impl Iterator { + ALL_PERMISSIONS.iter().copied() + } + } + + impl str::FromStr for Permission { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + match s { + $( $text => Ok(Self::$variant), )* + _ => Err("unknown permission") + } + } + } + + impl fmt::Display for Permission { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str( + match *self { + $( + Self::$variant => ($text), + )* + } + ) + } + } + + const ALL_PERMISSIONS: &'static[Permission] = &[ + $( Permission::$variant, )* + ]; + } +} + +define_permission! 
{ + (Login, "login"), + (PubAdmin, "pub-admin"), + (PubList, "pub-list"), + (PubRead, "pub-read"), + (PubCreate, "pub-create"), + (PubDelete, "pub-delete"), + (CaList, "ca-list"), + (CaRead, "ca-read"), + (CaCreate, "ca-create"), + (CaUpdate, "ca-update"), + (CaAdmin, "ca-admin"), + (CaDelete, "ca-delete"), + (RoutesRead, "routes-read"), + (RoutesUpdate, "routes-update"), + (RoutesAnalysis, "routes-analysis"), + (AspasRead, "aspas-read"), + (AspasUpdate, "aspas-update"), + (BgpsecRead, "bgpsec-read"), + (BgpsecUpdate, "bgpsec-update"), + (RtaList, "rta-list"), + (RtaRead, "rta-read"), + (RtaUpdate, "rta-update"), +} + + +//------------ ConfPermission ------------------------------------------------ + +/// A named permission as given in the config file. +/// +/// This includes all the permissions themselves plus the three “glob” +/// permissions `"list"`, `"read"`, `"create"`, `"delete"`, and `"admin"` +/// which include all the respective permissions for all components. +#[derive(Clone, Copy, Debug, Deserialize)] +#[serde(try_from = "String")] +pub enum ConfPermission { + Single(Permission), + Any, + Read, + Update, +} + +impl ConfPermission { + fn add(self, set: PermissionSet) -> PermissionSet { + let self_set = match self { + Self::Single(perm) => { + return set.add(perm) + } + Self::Any => PermissionSet::ANY, + Self::Read => PermissionSet::CONF_READ, + Self::Update => PermissionSet::CONF_UPDATE, + }; + set.add_set(self_set) + } +} + +impl TryFrom for ConfPermission { + type Error = String; + + fn try_from(src: String) -> Result { + if let Ok(res) = Permission::from_str(&src) { + return Ok(Self::Single(res)) + } + + match src.as_str() { + "any" => Ok(Self::Any), + "read" => Ok(Self::Read), + "update" => Ok(Self::Update), + _ => Err(format!("unknown permission {src}")) + } + } +} + + +//------------ PermissionSet ------------------------------------------------- + +/// A set of permissions. 
+#[derive(Clone, Copy, Debug, Default, Deserialize, Eq, PartialEq)] +#[serde(from = "Vec")] +pub struct PermissionSet(u32); + +impl PermissionSet { + + const fn mask(permission: Permission) -> u32 { + 1u32 << (permission as u32) + } + + pub const fn add(self, permission: Permission) -> Self { + Self(self.0 | Self::mask(permission)) + } + + pub const fn add_set(self, other: PermissionSet) -> Self { + Self(self.0 | other.0) + } + + pub const fn remove(self, permission: Permission) -> Self { + Self(self.0 & !Self::mask(permission)) + } + + pub fn has(self, permission: Permission) -> bool { + self.0 & Self::mask(permission) != 0 + } + + pub fn iter(self) -> impl Iterator { + Permission::iter().filter(move |perm| self.has(*perm)) + } + + const fn from_permissions(mut slice: &[Permission]) -> Self { + let mut res = PermissionSet(0); + while let Some((head, tail)) = slice.split_first() { + res = res.add(*head); + slice = tail; + } + res + } +} + +impl From> for PermissionSet { + fn from(src: Vec) -> Self { + let mut res = Self(0); + for item in src { + res = item.add(res) + } + res + } +} + + +mod policy { + use super::PermissionSet; + use super::Permission::*; + + impl PermissionSet { + pub const ANY: Self = Self(u32::MAX); + + pub const NONE: Self = Self(0); + + pub const READONLY: Self = Self::from_permissions(&[ + Login, + CaList, + CaRead, + PubList, + PubRead, + RoutesRead, + RoutesAnalysis, + AspasRead, + BgpsecRead, + RtaList, + RtaRead + ]); + + pub const READWRITE: Self = Self::from_permissions(&[ + Login, + CaList, + CaRead, + CaCreate, + CaUpdate, + PubList, + PubRead, + PubCreate, + PubDelete, + RoutesRead, + RoutesAnalysis, + RoutesUpdate, + AspasRead, + AspasUpdate, + BgpsecRead, + BgpsecUpdate, + RtaList, + RtaRead, + RtaUpdate + ]); + + pub const TESTBED: Self = Self::from_permissions(&[ + CaRead, + CaUpdate, + PubRead, + PubCreate, + PubDelete, + PubAdmin + ]); + + pub const CONF_READ: Self = Self::from_permissions(&[ + CaRead, + RoutesRead, 
RoutesAnalysis, + AspasRead, + BgpsecRead, + RtaRead, + ]); + + pub const CONF_UPDATE: Self = Self::from_permissions(&[ + RoutesUpdate, BgpsecUpdate, RtaUpdate, + ]); + } +} + diff --git a/src/daemon/auth/policy.rs b/src/daemon/auth/policy.rs deleted file mode 100644 index 28e5438f3..000000000 --- a/src/daemon/auth/policy.rs +++ /dev/null @@ -1,299 +0,0 @@ -use std::{io::Read, str::FromStr, sync::Arc}; - -use oso::{Oso, PolarClass, PolarValue, ToPolar}; - -use crate::{ - commons::{ - actor::Actor, - error::{Error, KrillIoError}, - KrillResult, - }, - constants::{ - ACTOR_DEF_ADMIN_TOKEN, ACTOR_DEF_ANON, ACTOR_DEF_KRILL, - ACTOR_DEF_TESTBED, - }, - daemon::{ - auth::{ - common::{permissions::Permission, NoResourceType}, - Handle, - }, - config::Config, - }, -}; - -#[derive(Clone)] -pub struct AuthPolicy { - oso: Arc, -} - -impl std::ops::Deref for AuthPolicy { - type Target = Arc; - - fn deref(&self) -> &Self::Target { - &self.oso - } -} - -impl AuthPolicy { - pub fn new(config: Arc) -> KrillResult { - let mut oso = Oso::new(); - oso.register_class(Actor::get_polar_class()).unwrap(); - oso.register_class(Handle::get_polar_class()).unwrap(); - - // Register both the Permission enum as a Polar class and its variants - // as Polar constants. The former is useful for writing Polar - // rules that only match on actual Krill Permissions, not on arbitrary - // strings, e.g. `allow(actor, action: Permission, resource)`. - // The latter is useful when writing rules that depend on a - // specific permission, e.g. `if action = CA_READ`. Without the - // variants as constants we would have to create a - // new Permission each time, converting from a string to the - // Permission type, e.g. `action = new Permission("CA_READ")`. 
- oso.register_class(Permission::get_polar_class()).unwrap(); - for permission in Permission::iter() { - let name = format!("{}", permission); - oso.register_constant(permission, &name).unwrap(); - } - - // Load built-in Polar authorization policy rules from embedded - // strings - Self::load_internal_policy( - &mut oso, - include_bytes!("../../../defaults/roles.polar"), - "roles", - )?; - Self::load_internal_policy( - &mut oso, - include_bytes!("../../../defaults/rules.polar"), - "rules", - )?; - Self::load_internal_policy( - &mut oso, - include_bytes!("../../../defaults/aliases.polar"), - "aliases", - )?; - Self::load_internal_policy( - &mut oso, - include_bytes!("../../../defaults/rbac.polar"), - "rbac", - )?; - Self::load_internal_policy( - &mut oso, - include_bytes!("../../../defaults/abac.polar"), - "abac", - )?; - - // Load additional policy rules from files optionally provided by the - // customer - Self::load_user_policy(config, &mut oso)?; - - // Sanity check: Verify the roles assigned to the built-in actors are - // as expected. - debug!("Running Polar self checks"); - - // The "krill" built-in actor is used to attribute internal actions by - // Krill that were not directly triggered by a user. This user should - // have the "admin" role. - Self::exec_query( - &mut oso, - r#"actor_has_role(Actor.builtin("krill"), "admin")"#, - )?; - - // The "admin-token" built-in actor is used for logins using the admin - // token (aka the "admin_token" set in the config file or via env - // var). This actor should have the "admin" role. - Self::exec_query( - &mut oso, - r#"actor_has_role(Actor.builtin("admin-token"), "admin")"#, - )?; - - // The built-in test actor "anon" represents a not-logged-in user and - // as such lacks a role. We should be able to test that it the actor - // does not have any role (represented by the _ placeholder in Oso - // Polar syntax). 
- Self::exec_query( - &mut oso, - r#"not actor_has_role(Actor.builtin("anon"), _)"#, - )?; - - // The built-in test actor "testbed" represents an anonymous user that - // is using the testbed UI/API and is temporarily upgraded with the - // necessary rights to perform the testbed related actions. These - // actions are grouped into a "testbed" role. The "testbed" actor - // should have the "testbed" role. - Self::exec_query( - &mut oso, - r#"actor_has_role(Actor.builtin("testbed"), "testbed")"#, - )?; - - Ok(AuthPolicy { oso: Arc::new(oso) }) - } - - pub fn is_allowed( - &self, - actor: U, - action: A, - resource: R, - ) -> Result - where - U: ToPolar, - A: ToPolar, - R: ToPolar, - { - self.oso.is_allowed(actor, action, resource).map_err(|err| { - Error::custom(format!( - "Internal error while checking access against policy: {}", - err - )) - }) - } - - fn load_internal_policy( - oso: &mut Oso, - bytes: &[u8], - fname: &str, - ) -> KrillResult<()> { - trace!("Loading Polar policy '{}'", fname); - oso.load_str(std::str::from_utf8(bytes).map_err(|err| { - Error::custom(format!( - "Internal Polar policy '{}' is not valid UTF-8: {}", - fname, err - )) - })?) - .map_err(|err| { - Error::custom(format!( - "Internal Polar policy '{}' is not valid Polar syntax: {}", - fname, err - )) - }) - } - - fn exec_query(oso: &mut Oso, query: &str) -> KrillResult<()> { - oso.query(query).map_err(|err| { - Error::custom(format!( - "The Polar self check query '{}' failed: {}", - query, err - )) - })?; - Ok(()) - } - - fn load_user_policy( - config: Arc, - oso: &mut Oso, - ) -> KrillResult<()> { - for policy in config.auth_policies.iter() { - info!( - "Loading user-defined authorization policy file {:?}", - policy - ); - let fname = policy.file_name().unwrap().to_str().unwrap(); - let mut buffer = Vec::new(); - std::fs::File::open(policy.as_path()) - .map_err(|e| { - KrillIoError::new( - format!( - "Could not open policy file '{}'", - policy.to_string_lossy() - ), - e, - ) - })? 
- .read_to_end(&mut buffer) - .map_err(|e| { - KrillIoError::new( - format!( - "Could not read policy file '{}'", - policy.to_string_lossy() - ), - e, - ) - })?; - AuthPolicy::load_internal_policy(oso, &buffer, fname)?; - } - - Ok(()) - } -} - -// Allow our "no resource" type to match the "nil" in Oso policy rules by -// making it convertible to the Rust type Oso uses when registering the nil -// constant. We can't use Option::::None directly as it doesn't -// implement the Display trait which we depend on in non-trace level logging -// in `fn Actor::is_allowed()`. -// -// Note: for now it is not possible to use 'nil' directly due to https://github.com/osohq/oso/issues/788. Instead you -// have to do something like this: -// -// allow(actor: Actor, action: Permission, _resource: Option) if -// _resource = nil and -// ... -// -// WHen the bug is fixed you should then be able to do this: -// -// allow(actor: Actor, action: Permission, nil) if -// ... -impl ToPolar for NoResourceType { - #[allow(clippy::wrong_self_convention)] - fn to_polar(self) -> oso::PolarValue { - Option::::None.to_polar() - } -} - -impl PolarClass for Actor { - fn get_polar_class() -> oso::Class { - Self::get_polar_class_builder() - .set_constructor(Actor::test_from_details) - .set_equality_check(|left: &Actor, right: &Actor| { - left.name() == right.name() - }) - .add_attribute_getter("name", |instance| { - instance.name().to_string() - }) - .add_class_method("builtin", |name: String| -> Actor { - match name.as_str() { - "anon" => Actor::actor_from_def(ACTOR_DEF_ANON), - "krill" => Actor::actor_from_def(ACTOR_DEF_KRILL), - "admin-token" => { - Actor::actor_from_def(ACTOR_DEF_ADMIN_TOKEN) - } - "testbed" => Actor::actor_from_def(ACTOR_DEF_TESTBED), - _ => panic!("Unknown built-in actor name '{}'", name), - } - }) - // method to do a "contains" test, either get rid of this if the - // Oso Polar "in" operator will suffice or move this - // to a separate Polar Class called Util and name the - // 
method "contains". - .add_class_method( - "is_in", - |name: String, names: Vec| -> bool { - names.contains(&name) - }, - ) - .add_method("attr", Actor::attribute) - .add_method("attrs", Actor::attributes) - .build() - } - - fn get_polar_class_builder() -> oso::ClassBuilder { - oso::Class::builder() - } -} - -impl PolarClass for Permission { - fn get_polar_class() -> oso::Class { - Self::get_polar_class_builder() - .set_constructor(|perm_name: String| -> Permission { - Permission::from_str(&perm_name).unwrap() - }) - .set_equality_check(|left: &Permission, right: &Permission| { - *left == *right - }) - .build() - } - - fn get_polar_class_builder() -> oso::ClassBuilder { - oso::Class::builder() - } -} diff --git a/src/daemon/auth/providers/admin_token.rs b/src/daemon/auth/providers/admin_token.rs index 02af62091..da5ef7d3e 100644 --- a/src/daemon/auth/providers/admin_token.rs +++ b/src/daemon/auth/providers/admin_token.rs @@ -1,48 +1,71 @@ -use std::sync::Arc; +//! Auth provider using a pre-defined token. +use std::sync::Arc; +use crate::commons::KrillResult; +use crate::commons::api::Token; +use crate::commons::error::{ApiAuthError, Error}; +use crate::commons::util::httpclient; +use crate::daemon::auth::{AuthInfo, LoggedInUser, Role}; +use crate::daemon::config::Config; use crate::daemon::http::{HttpResponse, HyperRequest}; -use crate::{ - commons::{ - actor::ActorDef, api::Token, error::Error, util::httpclient, - KrillResult, - }, - constants::ACTOR_DEF_ADMIN_TOKEN, - daemon::{auth::LoggedInUser, config::Config}, -}; - -// This is NOT an actual relative path to redirect to. Instead it is the path -// string of an entry in the Vue router routes table to "route" to (in the -// Lagosta single page application). See the routes array in router.js of the -// Lagosta source code. Ideally we could instead return a route name and then -// Lagosta could change this path without requiring that we update to match. 
+ + +//------------ Constants ----------------------------------------------------- + +/// The path defined in Krill UI for the login view. const LAGOSTA_LOGIN_ROUTE_PATH: &str = "/login"; -pub struct AdminTokenAuthProvider { + +//------------ AuthProvider -------------------------------------------------- + +/// The admin token auth provider. +/// +/// This auth provider takes a single token from the configuration and +/// only allows requests that carry this token as a bearer token. +/// +/// Currently, this provider is hard-coded to translate this token into +/// a user named “admin” having the admin special role which allows +/// everything everywhere all at once. +pub struct AuthProvider { + /// The configured token to compare with. required_token: Token, + + /// The user name of the actor if authentication succeeds. + user_id: Arc, + + /// The role to use if authentication succeeds. + role: Arc, } -impl AdminTokenAuthProvider { +impl AuthProvider { + /// Creates a new admin token auth provider from the given config. pub fn new(config: Arc) -> Self { - AdminTokenAuthProvider { + AuthProvider { required_token: config.admin_token.clone(), + user_id: "admin-token".into(), + role: Role::admin().into(), } } -} - -impl AdminTokenAuthProvider { + + /// Authenticates a user from information included in an HTTP request. + /// + /// If the request has a bearer token, returns `Ok(Some(_))` if it + /// matches the configured token or `Err(_)` otherwise. If there is no + /// bearer token, returns `Ok(None)`. 
pub fn authenticate( - &self, - request: &HyperRequest, - ) -> KrillResult> { + &self, request: &HyperRequest, + ) -> Result, ApiAuthError> { if log_enabled!(log::Level::Trace) { trace!("Attempting to authenticate the request.."); } let res = match httpclient::get_bearer_token(request) { Some(token) if token == self.required_token => { - Ok(Some(ACTOR_DEF_ADMIN_TOKEN)) + Ok(Some(AuthInfo::user( + self.user_id.clone(), self.role.clone() + ))) } - Some(_) => Err(Error::ApiInvalidCredentials( + Some(_) => Err(ApiAuthError::ApiInvalidCredentials( "Invalid bearer token".to_string(), )), None => Ok(None), @@ -55,30 +78,33 @@ impl AdminTokenAuthProvider { res } + /// Returns an HTTP text response with the login URL. pub fn get_login_url(&self) -> KrillResult { // Direct Lagosta to show the user the Lagosta API token login form Ok(HttpResponse::text_no_cache(LAGOSTA_LOGIN_ROUTE_PATH.into())) } + /// Establishes a client session from credentials in an HTTP request. pub fn login(&self, request: &HyperRequest) -> KrillResult { match self.authenticate(request)? { - Some(actor_def) => Ok(LoggedInUser { - token: self.required_token.clone(), - id: actor_def.name.as_str().to_string(), - attributes: actor_def.attributes.as_map(), - }), + Some(_actor) => Ok(LoggedInUser::new( + self.required_token.clone(), + self.user_id.as_ref().into(), + "admin".into(), + )), None => Err(Error::ApiInvalidCredentials( "Missing bearer token".to_string(), )), } } + /// Returns an HTTP text response with the logout URL. 
pub fn logout( &self, request: &HyperRequest, ) -> KrillResult { - if let Ok(Some(actor)) = self.authenticate(request) { - info!("User logged out: {}", actor.name.as_str()); + if let Ok(Some(info)) = self.authenticate(request) { + info!("User logged out: {}", info.actor().name()); } // Logout is complete, direct Lagosta to show the user the Lagosta @@ -86,3 +112,4 @@ impl AdminTokenAuthProvider { Ok(HttpResponse::text_no_cache(b"/".to_vec())) } } + diff --git a/src/daemon/auth/providers/config_file.rs b/src/daemon/auth/providers/config_file.rs new file mode 100644 index 000000000..935a9824b --- /dev/null +++ b/src/daemon/auth/providers/config_file.rs @@ -0,0 +1,386 @@ +//! Auth provider using user information from the configuration. + +use std::collections::HashMap; +use std::sync::Arc; +use base64::engine::general_purpose::STANDARD as BASE64_ENGINE; +use base64::engine::Engine as _; +use unicode_normalization::UnicodeNormalization; +use crate::commons::KrillResult; +use crate::commons::api::Token; +use crate::commons::error::{ApiAuthError, Error}; +use crate::commons::util::httpclient; +use crate::constants::{PW_HASH_LOG_N, PW_HASH_P, PW_HASH_R}; +use crate::daemon::auth::crypt; +use crate::daemon::auth::{AuthInfo, LoggedInUser, Permission, RoleMap}; +use crate::daemon::auth::session::{ClientSession, LoginSessionCache}; +use crate::daemon::config::Config; +use crate::daemon::http::{HttpResponse, HyperRequest}; + + +//------------ Constants ----------------------------------------------------- + +/// The location of the login page in Krill UI. +const UI_LOGIN_ROUTE_PATH: &str = "/login?withId=true"; + +/// A password hash used to prolong operation when a user doesn’t exist. +const FAKE_PASSWORD_HASH: &str = "66616B652070617373776F72642068617368"; + +/// A salt value used to prolong operation when a user doesn’t exist. 
+const FAKE_SALT: &str = "66616B652073616C74"; + + +//------------ AuthProvider -------------------------------------------------- + +/// The config file auth provider. +/// +/// This auth provider uses user and role information provided via the Krill +/// config and authenticates requests using HTTP Basic Authorization headers. +pub struct AuthProvider { + /// The user directory. + users: HashMap, + + /// The role directory. + roles: Arc, + + /// The session key for encrypting client session information. + session_key: crypt::CryptState, + + /// The client session cache. + session_cache: SessionCache, +} + +impl AuthProvider { + /// Creates an auth provider from the given config. + pub fn new( + config: &Config, + ) -> KrillResult { + let users = config.auth_users.as_ref().ok_or_else(|| { + Error::ConfigError("Missing [auth_users] config section!".into()) + })?.clone(); + let roles = config.auth_roles.clone(); + let session_key = Self::init_session_key(config)?; + + Ok(Self { + users, + roles, + session_key, + session_cache: SessionCache::new(), + }) + } + + fn init_session_key(config: &Config) -> KrillResult { + debug!("Initializing login session encryption key"); + crypt::crypt_init(config) + } + + /// Parse HTTP Basic Authorization header + fn get_auth(&self, request: &HyperRequest) -> Option { + let header = + request.headers().get(hyper::http::header::AUTHORIZATION)?; + let auth = header.to_str().ok()?.strip_prefix("Basic ")?; + let auth = BASE64_ENGINE.decode(auth).ok()?; + let auth = String::from_utf8(auth).ok()?; + let (username, password) = auth.split_once(':')?; + + Some(Auth { + username: username.to_string(), + password: password.to_string(), + }) + } + + fn auth_from_session( + &self, session: &Session + ) -> Result { + self.roles.get(&session.secrets.role).map(|role| { + AuthInfo::user(session.user_id.clone(), role) + }).ok_or_else(|| { + ApiAuthError::ApiAuthPermanentError( + format!( + "user '{}' with undefined role '{}' \ + not caught by config 
check", + session.user_id, session.secrets.role + ) + ) + }) + } +} + +impl AuthProvider { + pub fn authenticate( + &self, + request: &HyperRequest, + ) -> Result, ApiAuthError> { + if log_enabled!(log::Level::Trace) { + trace!("Attempting to authenticate the request.."); + } + + let res = match httpclient::get_bearer_token(request) { + Some(token) => { + // see if we can decode, decrypt and deserialize the users + // token into a login session structure + let session = self.session_cache.decode( + token, + &self.session_key, + true, + )?; + + trace!("user_id={}", session.user_id); + + Ok(Some(self.auth_from_session(&session)?)) + } + _ => Ok(None), + }; + + if log_enabled!(log::Level::Trace) { + trace!("Authentication result: {:?}", res); + } + + res + } + + pub fn get_login_url(&self) -> KrillResult { + // Direct Lagosta to show the user the Lagosta API token login form + Ok(HttpResponse::text_no_cache(UI_LOGIN_ROUTE_PATH.into())) + } + + pub fn login(&self, request: &HyperRequest) -> KrillResult { + use scrypt::scrypt; + + let auth = match self.get_auth(request) { + Some(auth) => auth, + None => { + trace!("Missing or incomplete credentials for login attempt"); + return Err(Error::ApiInvalidCredentials( + "Missing credentials".to_string(), + )) + } + }; + + // Do NOT bail out if the user is not known because then the + // unknown user path would return very quickly + // compared to the known user path and timing differences can aid + // attackers. 
+ let (user_password_hash, user_salt) = + match self.users.get(&auth.username) { + Some(user) => { + (user.password_hash.as_ref(), user.salt.as_ref()) + } + None => (FAKE_PASSWORD_HASH, FAKE_SALT), + }; + + let username = auth.username.trim().nfkc().collect::(); + let password = auth.password.trim().nfkc().collect::(); + + // hash twice with two different salts + // legacy hashing strategy to be compatible with lagosta + let params = scrypt::Params::new( + PW_HASH_LOG_N, + PW_HASH_R, + PW_HASH_P, + scrypt::Params::RECOMMENDED_LEN, + ) + .unwrap(); + let weak_salt = format!("krill-lagosta-{username}"); + let weak_salt = weak_salt.nfkc().collect::(); + + let mut interim_hash: [u8; 32] = [0; 32]; + scrypt( + password.as_bytes(), + weak_salt.as_bytes(), + ¶ms, + &mut interim_hash, + ) + .unwrap(); + + let strong_salt: Vec = hex::decode(user_salt).unwrap(); + let mut hashed_hash: [u8; 32] = [0; 32]; + scrypt( + &interim_hash, + strong_salt.as_slice(), + ¶ms, + &mut hashed_hash, + ) + .unwrap(); + + let encoded_hash = hex::encode(hashed_hash); + + // And now finally check the user, so that both known and + // unknown user code paths do the same work + // and don't result in an obvious timing difference between + // the two scenarios which could potentially + // be used to discover user names. + if encoded_hash != user_password_hash { + trace!("Unknown user {}", username); + return Err(Error::ApiInvalidCredentials( + "Incorrect credentials".to_string(), + )) + } + + let user = match self.users.get(username.as_str()) { + Some(user) => user, + None => { + trace!("Incorrect password for user {}", username); + return Err(Error::ApiInvalidCredentials( + "Incorrect credentials".to_string(), + )); + } + }; + + // Check that the user is allowed to log in. 
+ let role = self.roles.get(&user.role).ok_or_else(|| { + ApiAuthError::ApiAuthPermanentError( + format!( + "user '{}' with undefined role '{}' \ + not caught by config check", + username, user.role, + ) + ) + })?; + + if !role.is_allowed(Permission::Login, None) { + let reason = format!( + "Login denied for user '{}': \ + User is not permitted to 'login'", + username, + ); + warn!("{}", reason); + return Err(Error::ApiInsufficientRights(reason)); + } + + let username = Arc::::from(username); + + // All good: create a token and return. + let api_token = self.session_cache.encode( + username.clone(), + SessionSecret { role: user.role.clone() }, + &self.session_key, + None, + )?; + + Ok(LoggedInUser::new(api_token, username, user.role.clone())) + } + + pub fn logout( + &self, + request: &HyperRequest, + ) -> KrillResult { + match httpclient::get_bearer_token(request) { + Some(token) => { + self.session_cache.remove(&token); + + if let Ok(Some(info)) = self.authenticate(request) { + info!("User logged out: {}", info.actor().name()); + } + } + _ => { + warn!( + "Unexpectedly received a logout request \ + without a session token." + ); + } + } + + // Logout is complete, direct Lagosta to show the user the Lagosta + // index page + Ok(HttpResponse::text_no_cache("/".into())) + } + + pub fn sweep(&self) -> KrillResult<()> { + self.session_cache.sweep() + } + + pub fn cache_size(&self) -> usize { + self.session_cache.size() + } +} + + +//------------ ConfigAuthUsers ----------------------------------------------- + +pub type ConfigAuthUsers = HashMap; + + +//------------ LegacyUserDetails --------------------------------------------- + +/// The actual user details type used in the config file. +/// +/// Previous versions of Krill used a concept of user-defined attributes. 
This +/// has now been simplified to just a single attribute “role.” In order to +/// allow transitioning from the old world to the new, we allow the role name +/// to be in an “attributes” hash map or its own field. In the former case, +/// we will accept the config file but warn. We will also accept additional +/// attributes but warn about those, too. +/// +/// However, the password-related fields are now mandatory since we are not +/// using this configuration for the OpenID Connect provider any more. +/// +/// This is all implemented by using the `try_from` Serde container attribute. +#[derive(Clone, Debug, Deserialize)] +struct LegacyUserDetails { + password_hash: String, + salt: String, + role: Option, + attributes: Option>, +} + + +//------------ UserDetails --------------------------------------------------- + +#[derive(Clone, Debug, Deserialize)] +#[serde(try_from = "LegacyUserDetails")] +pub struct UserDetails { + password_hash: Token, + salt: String, + role: Arc, +} + +impl TryFrom for UserDetails { + type Error = String; + + fn try_from(src: LegacyUserDetails) -> Result { + let role = if let Some(mut attributes) = src.attributes { + warn!( + "The 'attributes' auth_user field is deprecated. \ + Please use the 'role' field directly." 
+ ); + match attributes.remove("role") { + Some(role) => role, + None => { + return Err("missing 'role' attribute".into()); + } + } + } + else { + match src.role { + Some(role) => role, + None => { + return Err("missing 'role' field".into()); + } + } + }; + Ok(Self { + password_hash: src.password_hash.into(), + salt: src.salt, + role: role.into() + }) + } +} + + +//------------ SessionSecret et al ------------------------------------------- + +#[derive(Clone, Debug, Deserialize, Serialize)] +struct SessionSecret { + role: Arc, +} + +type SessionCache = LoginSessionCache; +type Session = ClientSession; + + +//------------ Auth ---------------------------------------------------------- + +struct Auth { + username: String, + password: String, +} diff --git a/src/daemon/auth/providers/config_file/config.rs b/src/daemon/auth/providers/config_file/config.rs deleted file mode 100644 index a1bff175f..000000000 --- a/src/daemon/auth/providers/config_file/config.rs +++ /dev/null @@ -1,16 +0,0 @@ -use std::collections::HashMap; - -pub type ConfigAuthUsers = HashMap; - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ConfigUserDetails { - #[serde(default)] - pub attributes: HashMap, - - // optional so that OpenIDConnectAuthProvider can also use config file - // user defined attributes without requiring a dummy password hash - // and salt - pub password_hash: Option, - - pub salt: Option, -} diff --git a/src/daemon/auth/providers/config_file/mod.rs b/src/daemon/auth/providers/config_file/mod.rs deleted file mode 100644 index 84c6e892e..000000000 --- a/src/daemon/auth/providers/config_file/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod config; -pub mod provider; diff --git a/src/daemon/auth/providers/config_file/provider.rs b/src/daemon/auth/providers/config_file/provider.rs deleted file mode 100644 index f657ebc12..000000000 --- a/src/daemon/auth/providers/config_file/provider.rs +++ /dev/null @@ -1,283 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use 
base64::engine::general_purpose::STANDARD as BASE64_ENGINE; -use base64::engine::Engine as _; -use unicode_normalization::UnicodeNormalization; - -use crate::daemon::http::{HttpResponse, HyperRequest}; -use crate::{ - commons::{ - actor::ActorDef, api::Token, error::Error, util::httpclient, - KrillResult, - }, - constants::{PW_HASH_LOG_N, PW_HASH_P, PW_HASH_R}, - daemon::{ - auth::common::{ - crypt::{self, CryptState}, - session::*, - }, - auth::providers::config_file::config::ConfigUserDetails, - auth::{Auth, LoggedInUser}, - config::Config, - }, -}; - -const UI_LOGIN_ROUTE_PATH: &str = "/login?withId=true"; - -struct UserDetails { - password_hash: Token, - salt: String, - attributes: HashMap, -} - -fn get_checked_config_user( - id: &str, - user: &ConfigUserDetails, -) -> KrillResult { - let password_hash = user - .password_hash - .as_ref() - .ok_or_else(|| { - Error::ConfigError(format!( - "Password hash missing for user '{}'", - id - )) - })? - .to_string(); - - let salt = user - .salt - .as_ref() - .ok_or_else(|| { - Error::ConfigError(format!( - "Password salt missing for user '{}'", - id - )) - })? 
- .to_string(); - - Ok(UserDetails { - password_hash: Token::from(password_hash), - salt, - attributes: user.attributes.clone(), - }) -} - -pub struct ConfigFileAuthProvider { - users: HashMap, - session_key: CryptState, - session_cache: Arc, - fake_password_hash: String, - fake_salt: String, -} - -impl ConfigFileAuthProvider { - pub fn new( - config: Arc, - session_cache: Arc, - ) -> KrillResult { - match &config.auth_users { - Some(auth_users) => { - let mut users = HashMap::new(); - for (k, v) in auth_users.iter() { - users.insert(k.clone(), get_checked_config_user(k, v)?); - } - - let session_key = Self::init_session_key(&config)?; - - Ok(ConfigFileAuthProvider { - users, - session_key, - session_cache, - fake_password_hash: hex::encode("fake password hash"), - fake_salt: hex::encode("fake salt"), - }) - } - None => Err(Error::ConfigError( - "Missing [auth_users] config section!".into(), - )), - } - } - - fn init_session_key(config: &Config) -> KrillResult { - debug!("Initializing login session encryption key"); - crypt::crypt_init(config) - } - - /// Parse HTTP Basic Authorization header - fn get_auth(&self, request: &HyperRequest) -> Option { - let header = - request.headers().get(hyper::http::header::AUTHORIZATION)?; - let auth = header.to_str().ok()?.strip_prefix("Basic ")?; - let auth = BASE64_ENGINE.decode(auth).ok()?; - let auth = String::from_utf8(auth).ok()?; - let (username, password) = auth.split_once(':')?; - - Some(Auth::UsernameAndPassword { - username: username.to_string(), - password: password.to_string(), - }) - } -} - -impl ConfigFileAuthProvider { - pub fn authenticate( - &self, - request: &HyperRequest, - ) -> KrillResult> { - if log_enabled!(log::Level::Trace) { - trace!("Attempting to authenticate the request.."); - } - - let res = match httpclient::get_bearer_token(request) { - Some(token) => { - // see if we can decode, decrypt and deserialize the users - // token into a login session structure - let session = self.session_cache.decode( 
- token, - &self.session_key, - true, - )?; - - trace!( - "id={}, attributes={:?}", - &session.id, - &session.attributes - ); - - Ok(Some(ActorDef::user(session.id, session.attributes, None))) - } - _ => Ok(None), - }; - - if log_enabled!(log::Level::Trace) { - trace!("Authentication result: {:?}", res); - } - - res - } - - pub fn get_login_url(&self) -> KrillResult { - // Direct Lagosta to show the user the Lagosta API token login form - Ok(HttpResponse::text_no_cache(UI_LOGIN_ROUTE_PATH.into())) - } - - pub fn login(&self, request: &HyperRequest) -> KrillResult { - if let Some(Auth::UsernameAndPassword { username, password }) = - self.get_auth(request) - { - use scrypt::scrypt; - - // Do NOT bail out if the user is not known because then the - // unknown user path would return very quickly - // compared to the known user path and timing differences can aid - // attackers. - let (user_password_hash, user_salt) = - match self.users.get(&username) { - Some(user) => { - (user.password_hash.to_string(), user.salt.clone()) - } - None => ( - self.fake_password_hash.clone(), - self.fake_salt.clone(), - ), - }; - - let username = username.trim().nfkc().collect::(); - let password = password.trim().nfkc().collect::(); - - // hash twice with two different salts - // legacy hashing strategy to be compatible with lagosta - let params = scrypt::Params::new( - PW_HASH_LOG_N, - PW_HASH_R, - PW_HASH_P, - scrypt::Params::RECOMMENDED_LEN, - ) - .unwrap(); - let weak_salt = format!("krill-lagosta-{username}"); - let weak_salt = weak_salt.nfkc().collect::(); - - let mut interim_hash: [u8; 32] = [0; 32]; - scrypt( - password.as_bytes(), - weak_salt.as_bytes(), - ¶ms, - &mut interim_hash, - ) - .unwrap(); - - let strong_salt: Vec = hex::decode(user_salt).unwrap(); - let mut hashed_hash: [u8; 32] = [0; 32]; - scrypt( - &interim_hash, - strong_salt.as_slice(), - ¶ms, - &mut hashed_hash, - ) - .unwrap(); - - let encoded_hash = hex::encode(hashed_hash); - - if encoded_hash == 
user_password_hash { - // And now finally check the user, so that both known and - // unknown user code paths do the same work - // and don't result in an obvious timing difference between - // the two scenarios which could potentially - // be used to discover user names. - if let Some(user) = self.users.get(&username) { - let api_token = self.session_cache.encode( - &username, - &user.attributes, - HashMap::new(), - &self.session_key, - None, - )?; - - Ok(LoggedInUser { - token: api_token, - id: username.to_string(), - attributes: user.attributes.clone(), - }) - } else { - trace!("Incorrect password for user {}", username); - Err(Error::ApiInvalidCredentials( - "Incorrect credentials".to_string(), - )) - } - } else { - trace!("Unknown user {}", username); - Err(Error::ApiInvalidCredentials( - "Incorrect credentials".to_string(), - )) - } - } else { - trace!("Missing pr incomplete credentials for login attempt"); - Err(Error::ApiInvalidCredentials( - "Missing credentials".to_string(), - )) - } - } - - pub fn logout( - &self, - request: &HyperRequest, - ) -> KrillResult { - match httpclient::get_bearer_token(request) { - Some(token) => { - self.session_cache.remove(&token); - - if let Ok(Some(actor)) = self.authenticate(request) { - info!("User logged out: {}", actor.name.as_str()); - } - } - _ => { - warn!("Unexpectedly received a logout request without a session token."); - } - } - - // Logout is complete, direct Lagosta to show the user the Lagosta - // index page - Ok(HttpResponse::text_no_cache("/".into())) - } -} diff --git a/src/daemon/auth/providers/mod.rs b/src/daemon/auth/providers/mod.rs index 3e8e8a489..ef1048882 100644 --- a/src/daemon/auth/providers/mod.rs +++ b/src/daemon/auth/providers/mod.rs @@ -5,9 +5,3 @@ pub mod config_file; #[cfg(feature = "multi-user")] pub mod openid_connect; -pub use admin_token::AdminTokenAuthProvider; - -#[cfg(feature = "multi-user")] -pub use config_file::provider::ConfigFileAuthProvider; -#[cfg(feature = "multi-user")] 
-pub use openid_connect::provider::OpenIDConnectAuthProvider; diff --git a/src/daemon/auth/providers/openid_connect/claims.rs b/src/daemon/auth/providers/openid_connect/claims.rs new file mode 100644 index 000000000..a38f027c2 --- /dev/null +++ b/src/daemon/auth/providers/openid_connect/claims.rs @@ -0,0 +1,463 @@ +//! Processing OpenID Connect claims. + +use std::sync::Arc; +use regex::{Regex, Replacer}; +use serde::de::{Deserialize, Deserializer, Error as _}; +use serde_json::{Number as JsonNumber, Value as JsonValue}; +use crate::commons::KrillResult; +use crate::commons::error::Error; +use super::util::{FlexibleIdTokenClaims, FlexibleUserInfoClaims}; + + +//------------ Claims -------------------------------------------------------- + +#[derive(Debug)] +pub struct Claims<'a> { + id_token_claims: &'a FlexibleIdTokenClaims, + user_info_claims: Option, + + id_standard: Option, + id_additional: Option, + user_standard: Option, + user_additional: Option, +} + +impl<'a> Claims<'a> { + pub fn new( + id_token_claims: &'a FlexibleIdTokenClaims, + user_info_claims: Option, + ) -> Self { + Self { + id_token_claims, user_info_claims, + id_standard: None, id_additional: None, + user_standard: None, user_additional: None, + } + } + + pub fn extract_claims( + &mut self, conf: &[TransformationRule], + ) -> KrillResult>> { + for rule in conf { + match rule { + TransformationRule::Fixed(subst) => { + return Ok(Some(subst.clone())) + } + TransformationRule::Match(rule) => { + if let Some(res) = self.process_match_rule(rule)? { + return Ok(Some(res)) + } + } + } + } + + Ok(None) + } + + fn process_match_rule( + &mut self, conf: &MatchRule, + ) -> KrillResult>> { + use self::ClaimSource::*; + + match conf.source { + Some(IdTokenStandardClaim) => { + Self::process_claim_json(conf, self.id_standard()?) + } + Some(IdTokenAdditionalClaim) => { + Self::process_claim_json(conf, self.id_additional()?) 
+ } + Some(UserInfoStandardClaim) => { + self.user_standard()?.and_then(|json| { + Self::process_claim_json(conf, json).transpose() + }).transpose() + } + Some(UserInfoAdditionalClaim) => { + self.user_additional()?.and_then(|json| { + Self::process_claim_json(conf, json).transpose() + }).transpose() + } + None => { + if let Some(res) = Self::process_claim_json( + conf, self.id_standard()? + )? { + return Ok(Some(res)) + } + if let Some(res) = Self::process_claim_json( + conf, self.id_additional()? + )? { + return Ok(Some(res)) + } + if let Some(res) = self.user_standard()?.and_then(|json| { + Self::process_claim_json(conf, json).transpose() + }).transpose()? { + return Ok(Some(res)) + } + self.user_additional()?.and_then(|json| { + Self::process_claim_json(conf, json).transpose() + }).transpose() + } + } + } + + fn id_standard(&mut self) -> KrillResult<&JsonValue> { + if self.id_standard.is_none() { + self.id_standard = Some( + serde_json::to_value(self.id_token_claims).map_err(|_| { + Self::internal_error( + "OpenID Connect: \ + failed to generate standard ID token claims", + None + ) + })? + ) + } + Ok(self.id_standard.as_ref().unwrap()) + } + + fn id_additional(&mut self) -> KrillResult<&JsonValue> { + if self.id_additional.is_none() { + self.id_additional = Some( + serde_json::to_value( + self.id_token_claims.additional_claims() + ).map_err(|_| { + Self::internal_error( + "OpenID Connect: \ + failed to generate additional ID token claims", + None + ) + })? + ) + } + Ok(self.id_additional.as_ref().unwrap()) + } + + fn user_standard(&mut self) -> KrillResult> { + let claims = match self.user_info_claims.as_ref() { + Some(claims) => claims, + None => return Ok(None) + }; + if self.user_standard.is_none() { + self.user_standard = Some( + serde_json::to_value(claims).map_err(|_| { + Self::internal_error( + "OpenID Connect: \ + failed to generate standard user info claims", + None + ) + })? 
+ ) + } + Ok(self.user_standard.as_ref()) + } + + fn user_additional(&mut self) -> KrillResult> { + let claims = match self.user_info_claims.as_ref() { + Some(claims) => claims, + None => return Ok(None) + }; + if self.user_additional.is_none() { + self.user_additional = Some( + serde_json::to_value(claims.additional_claims()).map_err(|_| { + Self::internal_error( + "OpenID Connect: \ + failed to generate standard user info claims", + None + ) + })? + ) + } + Ok(self.user_additional.as_ref()) + } + + fn process_claim_json( + conf: &MatchRule, + json: &JsonValue, + ) -> KrillResult>> { + let object = match json { + JsonValue::Object(object) => object, + _ => return Ok(None) + }; + let value = match object.get(&conf.claim) { + Some(value) => value, + None => return Ok(None) + }; + match value { + JsonValue::Array(array) => Self::process_claim_array(conf, array), + JsonValue::Bool(true) => Self::process_claim_str(conf, "true"), + JsonValue::Bool(false) => Self::process_claim_str(conf, "false"), + JsonValue::String(s) => Self::process_claim_str(conf, s), + JsonValue::Number(num) => Self::process_claim_number(conf, num), + _ => Ok(None) + } + } + + fn process_claim_array( + conf: &MatchRule, + array: &[JsonValue], + ) -> KrillResult>> { + for item in array { + let res = match item { + JsonValue::Bool(true) => { + Self::process_claim_str(conf, "true")? + } + JsonValue::Bool(false) => { + Self::process_claim_str(conf, "false")? + } + JsonValue::String(s) => { + Self::process_claim_str(conf, s)? + } + JsonValue::Number(num) => { + Self::process_claim_number(conf, num)? 
+ } + _ => None + }; + if let Some(res) = res { + return Ok(Some(res)) + } + } + Ok(None) + } + + fn process_claim_number( + conf: &MatchRule, + num: &JsonNumber + ) -> KrillResult>> { + Self::process_claim_str(conf, &num.to_string()) + } + + fn process_claim_str( + conf: &MatchRule, + s: &str, + ) -> KrillResult>> { + if let Some(expr) = conf.match_expr.as_ref() { + match conf.subst.as_ref() { + Some(subst) => { + if subst.no_expansion { + match expr.0.find(s) { + Some(m) => { + let mut res = String::with_capacity(s.len()); + res.push_str(&s[..m.start()]); + res.push_str(&subst.expr); + res.push_str(&s[m.end()..]); + Ok(Some(res.into())) + } + None => Ok(None) + } + } + else { + match expr.0.captures(s) { + Some(c) => { + let mut res = String::with_capacity( + subst.expr.len() + ); + c.expand(&subst.expr, &mut res); + Ok(Some(res.into())) + } + None => Ok(None) + } + } + } + None => { + if expr.0.is_match(s) { + Ok(Some(s.into())) + } + else { + Ok(None) + } + } + } + } + else { + // If there is no match expression, the value always matches and + // we return it (even if there is a subst expression -- we just + // ignore it). + Ok(Some(s.into())) + } + } + + + /// Log and convert the given error such that the detailed, possibly + /// sensitive details are logged and only the high level statement + /// about the error is passed back to the caller. + fn internal_error(msg: S, additional_info: Option) -> Error + where + S: Into, + { + let msg: String = msg.into(); + match additional_info { + Some(additional_info) => { + warn!("{} [additional info: {}]", msg, additional_info.into()) + } + None => warn!("{}", msg), + }; + Error::ApiLoginError(msg) + } +} + + +//------------ TransformationRule -------------------------------------------- + +/// Transformation rule for a claim. +#[derive(Clone, Debug, Deserialize)] +#[serde(try_from = "TransformationRuleConf")] +pub enum TransformationRule { + /// Fixed rule. 
+ /// + /// This rule matches always and returns the provided string. + Fixed(Arc), + + /// Matching rule. + /// + /// This rule tries to match the provided claim and optionally replaces + /// the value with the given subst expression. + /// + /// The rule matches string values, number and boolean values with their + /// JSON representation. It also matches arrays item by item with the + /// first match being used. + Match(MatchRule), +} + + +//------------ MatchRule ----------------------------------------------------- + +#[derive(Clone, Debug)] +pub struct MatchRule { + pub source: Option, + pub claim: String, + pub match_expr: Option, + pub subst: Option, +} + + +//------------ TransformationRuleConf ---------------------------------------- + +#[derive(Clone, Debug, Deserialize)] +pub struct TransformationRuleConf { + pub source: Option, + pub claim: Option, + #[serde(rename = "match")] + pub match_expr: Option, + pub subst: Option, +} + +impl TryFrom for TransformationRule { + type Error = String; + + fn try_from(src: TransformationRuleConf) -> Result { + if let Some(claim) = src.claim { + Ok(TransformationRule::Match(MatchRule { + source: src.source, + claim, + match_expr: src.match_expr, + subst: src.subst.map(Into::into) + })) + } + else { + let subst = match src.subst { + Some(subst) => subst, + None => { + return Err( + "'subst' is mandatory if 'claim' is missing".into() + ) + } + }; + + // Complain if we have 'match' to avoid possible errors. All + // the other things are probably fine. 
+ if src.match_expr.is_some() { + return Err( + "'claim' is mandatory if 'match' is present".into() + ) + } + + Ok(TransformationRule::Fixed(subst.into())) + } + } +} + + +//------------ MatchExpression ----------------------------------------------- + +#[derive(Clone, Debug)] +pub struct MatchExpression(Regex); + +impl<'de> Deserialize<'de> for MatchExpression { + fn deserialize>( + deserializer: D + ) -> Result { + String::deserialize(deserializer).and_then(|s| { + Regex::try_from(s).map_err(D::Error::custom) + }).map(Self) + } +} + + +//------------ SubstExpression ----------------------------------------------- + +#[derive(Clone, Debug)] +pub struct SubstExpression { + expr: String, + no_expansion: bool, +} + +impl From for SubstExpression { + fn from(mut expr: String) -> Self { + let no_expansion = expr.no_expansion().is_some(); + Self { expr, no_expansion } + } +} + + +//------------ ClaimSource --------------------------------------------------- + +#[derive(Clone, Copy, Debug)] +#[allow(clippy::enum_variant_names)] +pub enum ClaimSource { + IdTokenStandardClaim, + IdTokenAdditionalClaim, + UserInfoStandardClaim, + UserInfoAdditionalClaim, +} + +impl std::fmt::Display for ClaimSource { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + use self::ClaimSource::*; + + f.write_str( + match self { + IdTokenStandardClaim => "id-token-standard-claim", + IdTokenAdditionalClaim => "id-token-additional-claim", + UserInfoStandardClaim => "user-info-standard-claim", + UserInfoAdditionalClaim => "user-info-additional-claim", + } + ) + } +} + +impl<'de> Deserialize<'de> for ClaimSource { + fn deserialize( + d: D, + ) -> Result + where + D: Deserializer<'de>, + { + use self::ClaimSource::*; + + match <&'de str>::deserialize(d)? 
{ + "id-token-standard-claim" => Ok(IdTokenStandardClaim), + "id-token-additional-claim" => Ok(IdTokenAdditionalClaim), + "user-info-standard-claim" => Ok(UserInfoStandardClaim), + "user-info-additional-claim" => Ok(UserInfoAdditionalClaim), + s => { + Err(serde::de::Error::custom( + format!( + "expected \"id-token-additional-claim\", \ + \"id-token-standard-claim\", \ + \"user-info-standard-claim\", or \ + \"user-info-additional-claim\", found : \"{}\"", + s + ))) + } + } + } +} + diff --git a/src/daemon/auth/providers/openid_connect/config.rs b/src/daemon/auth/providers/openid_connect/config.rs index fc20adbbd..2906c6919 100644 --- a/src/daemon/auth/providers/openid_connect/config.rs +++ b/src/daemon/auth/providers/openid_connect/config.rs @@ -1,11 +1,6 @@ use std::collections::HashMap; - -use serde::{de, Deserialize, Deserializer}; - -pub type ConfigAuthOpenIDConnectClaims = - HashMap; - -pub struct ConfigDefaults {} +use serde::Deserialize; +use super::claims::{MatchRule, TransformationRule}; #[derive(Clone, Debug, Deserialize)] pub struct ConfigAuthOpenIDConnect { @@ -15,7 +10,11 @@ pub struct ConfigAuthOpenIDConnect { pub client_secret: String, - pub claims: Option, + #[serde(default = "default_id_claims")] + pub id_claims: Vec, + + #[serde(default = "default_role_claims")] + pub role_claims: Vec, #[serde(default)] pub extra_login_scopes: Vec, @@ -34,66 +33,30 @@ pub struct ConfigAuthOpenIDConnect { } fn default_prompt_for_login() -> bool { - // On by default for backward compatability. See: https://github.com/NLnetLabs/krill/issues/614 + // On by default for backward compatability. 
+ // See: https://github.com/NLnetLabs/krill/issues/614 true } -#[derive(Clone, Debug, Deserialize)] -pub struct ConfigAuthOpenIDConnectClaim { - pub source: Option, - pub jmespath: Option, - pub dest: Option, -} - -#[derive(Clone, Debug)] -pub enum ConfigAuthOpenIDConnectClaimSource { - ConfigFile, - IdTokenStandardClaim, - IdTokenAdditionalClaim, - UserInfoStandardClaim, - UserInfoAdditionalClaim, +fn default_id_claims() -> Vec { + vec![ + TransformationRule::Match(MatchRule { + source: None, + claim: "email".into(), + match_expr: None, + subst: None, + }), + ] } -impl std::fmt::Display for ConfigAuthOpenIDConnectClaimSource { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - ConfigAuthOpenIDConnectClaimSource::ConfigFile => { - write!(f, "config-file") - } - ConfigAuthOpenIDConnectClaimSource::IdTokenStandardClaim => { - write!(f, "id-token-standard-claim") - } - ConfigAuthOpenIDConnectClaimSource::IdTokenAdditionalClaim => { - write!(f, "id-token-additional-claim") - } - ConfigAuthOpenIDConnectClaimSource::UserInfoStandardClaim => { - write!(f, "user-info-standard-claim") - } - ConfigAuthOpenIDConnectClaimSource::UserInfoAdditionalClaim => { - write!(f, "user-info-additional-claim") - } - } - } +fn default_role_claims() -> Vec { + vec![ + TransformationRule::Match(MatchRule { + source: None, + claim: "role".into(), + match_expr: None, + subst: None, + }), + ] } -impl<'de> Deserialize<'de> for ConfigAuthOpenIDConnectClaimSource { - fn deserialize( - d: D, - ) -> Result - where - D: Deserializer<'de>, - { - let string = String::deserialize(d)?; - match string.as_str() { - "config-file" => Ok(ConfigAuthOpenIDConnectClaimSource::ConfigFile), - "id-token-standard-claim" => Ok(ConfigAuthOpenIDConnectClaimSource::IdTokenStandardClaim), - "id-token-additional-claim" => Ok(ConfigAuthOpenIDConnectClaimSource::IdTokenAdditionalClaim), - "user-info-standard-claim" => Ok(ConfigAuthOpenIDConnectClaimSource::UserInfoStandardClaim), - 
"user-info-additional-claim" => Ok(ConfigAuthOpenIDConnectClaimSource::UserInfoAdditionalClaim), - _ => Err(de::Error::custom(format!( - "expected \"config-file\", \"id-token-additional-claim\", \"id-token-standard-claim\", \"user-info-standard-claim\", or \"user-info-additional-claim\", found : \"{}\"", - string - ))), - } - } -} diff --git a/src/daemon/auth/providers/openid_connect/jmespathext.rs b/src/daemon/auth/providers/openid_connect/jmespathext.rs deleted file mode 100644 index 93169d5b3..000000000 --- a/src/daemon/auth/providers/openid_connect/jmespathext.rs +++ /dev/null @@ -1,173 +0,0 @@ -use std::sync::Arc; - -use jmespatch as jmespath; - -use jmespath::{ - functions::{ArgumentType, CustomFunction, Signature}, - Context, ErrorReason, JmespathError, Rcvar, Runtime, -}; - -use regex::Regex; - -/// Create a customized instance of the JMESPath runtime with support for the -/// standard functions and two additional custom functions: recap and resub. -pub fn init_runtime() -> Runtime { - let mut runtime = Runtime::new(); - - runtime.register_builtin_functions(); - runtime.register_function("recap", make_recap_fn()); - runtime.register_function("resub", make_resub_fn()); - - runtime -} - -/// Custom JMESPath recap(haystack, regex) function that returns the value of -/// the first capture group of the first match in the haystack by the -/// specified regex. -/// -/// Returns an empty string if no match is found. 
-fn make_recap_fn() -> Box { - let fn_signature = - Signature::new(vec![ArgumentType::Any, ArgumentType::String], None); - - let fn_impl = Box::new(|args: &[Rcvar], _: &mut Context| { - trace!("jmespath recap() arguments: {:?}", args); - - let mut res = String::new(); - - if let jmespath::Variable::String(str) = &*args[0] { - if let jmespath::Variable::String(re_str) = &*args[1] { - match Regex::new(re_str) { - Ok(re) => { - let mut iter = re.captures_iter(str); - if let Some(captures) = iter.next() { - // captures[0] is the entire match - // captures[1] is the value of the first capture - // group match - res = captures[1].to_string(); - } - } - Err(err) => { - return Err(JmespathError::new( - re_str, - 0, - ErrorReason::Parse(format!( - "Invalid regular expression: {}", - err - )), - )); - } - } - } - } - - trace!("jmespath recap() result: {}", &res); - Ok(Arc::new(jmespath::Variable::String(res))) - }); - - Box::new(CustomFunction::new(fn_signature, fn_impl)) -} - -/// Custom JMESPath resub(haystack, needle regex, replacement value) function -/// that returns the result of replacing the first text in the haystack that -/// matches the needle regex with the given replacement value. -/// -/// Returns the given string unchanged if no match is found to replace. 
-fn make_resub_fn() -> Box { - let fn_signature = Signature::new( - vec![ - ArgumentType::Any, - ArgumentType::String, - ArgumentType::String, - ], - None, - ); - - let fn_impl = Box::new(|args: &[Rcvar], _: &mut Context| { - trace!("jmespath fn resub() arguments: {:?}", args); - - if let jmespath::Variable::String(str) = &*args[0] { - let mut res = String::new(); - if let jmespath::Variable::String(re_str) = &*args[1] { - if let jmespath::Variable::String(newval) = &*args[2] { - match Regex::new(re_str) { - Ok(re) => { - res = re - .replace(str.as_str(), newval.as_str()) - .to_string(); - } - Err(err) => { - return Err(JmespathError::new( - re_str, - 0, - ErrorReason::Parse(format!( - "Invalid regular expression: {}", - err - )), - )); - } - } - } - } - trace!("jmespath resub() result: {}", &res); - return Ok(Arc::new(jmespath::Variable::String(res))); - } - - Ok(Arc::new(jmespath::Variable::Null)) - }); - - Box::new(CustomFunction::new(fn_signature, fn_impl)) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn resub_should_handle_null_input() { - let runtime = init_runtime(); - - // Parse some JSON data into a JMESPath variable - let json_str = r#" - { - "groups":["a", "b"] - } - "#; - let jmespath_var = jmespath::Variable::from_json(json_str).unwrap(); - - // Create an expression that should yield null when evaluated - let null_expr = "groups[?@ == 'idontexist'] | [0]"; - let should_yield_null = runtime.compile(null_expr).unwrap(); - let result = should_yield_null.search(&jmespath_var).unwrap(); - assert_eq!(jmespath::Variable::Null, *result); - - // Now use that expression as input to the resub() function and verify - // that it returns null too - let should_also_yield_null = runtime - .compile(&format!("resub({}, '^.+$', 'admin')", null_expr)) - .unwrap(); - let result = should_also_yield_null.search(&jmespath_var).unwrap(); - assert_eq!(jmespath::Variable::Null, *result); - } - - #[test] - fn 
resub_should_return_error_when_given_an_invalid_regex() { - let runtime = init_runtime(); - - // an opening square bracket without matching closing square bracket - // is an invalid regular expression - let should_also_yield_null = runtime - .compile("resub('dummy input', '[', 'admin')") - .unwrap(); - - // Parse some JSON data into a JMESPath variable - let json_str = r#" - { - "groups":["a", "b"] - } - "#; - let jmespath_var = jmespath::Variable::from_json(json_str).unwrap(); - - assert!(should_also_yield_null.search(&jmespath_var).is_err()); - } -} diff --git a/src/daemon/auth/providers/openid_connect/mod.rs b/src/daemon/auth/providers/openid_connect/mod.rs index 8212cfd71..c7f40c9fe 100644 --- a/src/daemon/auth/providers/openid_connect/mod.rs +++ b/src/daemon/auth/providers/openid_connect/mod.rs @@ -1,9 +1,13 @@ +//! An authentication provider using OpenID Connect. + +pub use self::config::ConfigAuthOpenIDConnect; +pub use self::provider::AuthProvider; + #[macro_use] -pub mod util; +mod util; -pub mod config; -pub mod httpclient; -pub mod jmespathext; -pub mod provider; +mod claims; +mod config; +mod httpclient; +mod provider; -pub use config::ConfigAuthOpenIDConnect; diff --git a/src/daemon/auth/providers/openid_connect/provider.rs b/src/daemon/auth/providers/openid_connect/provider.rs index f335cfd20..4df3376a6 100644 --- a/src/daemon/auth/providers/openid_connect/provider.rs +++ b/src/daemon/auth/providers/openid_connect/provider.rs @@ -28,10 +28,6 @@ //! 
[openid-connect-rpinitiated-1_0]: https://openid.net/specs/openid-connect-rpinitiated-1_0.html use std::{ - collections::{ - hash_map::Entry::{Occupied, Vacant}, - HashMap, - }, ops::Deref, sync::Arc, time::Instant, @@ -43,8 +39,6 @@ use base64::engine::general_purpose::URL_SAFE_NO_PAD as URL_BASE64_ENGINE; use base64::engine::Engine as _; use basic_cookies::Cookie; use hyper::header::{HeaderValue, SET_COOKIE}; -use jmespatch as jmespath; -use jmespath::ToJmespath; use openidconnect::{ core::{ @@ -63,39 +57,34 @@ use urlparse::{urlparse, GetQuery}; use crate::daemon::http::{HttpResponse, HyperRequest}; use crate::{ commons::{ - actor::ActorDef, api::Token, - error::Error, + error::{ApiAuthError, Error}, util::{httpclient, sha256}, KrillResult, }, daemon::{ auth::{ - common::{ - crypt::{self, CryptState}, - session::*, - }, - providers::config_file::config::ConfigUserDetails, + crypt::{self, CryptState}, providers::openid_connect::{ - config::{ - ConfigAuthOpenIDConnect, ConfigAuthOpenIDConnectClaim, - ConfigAuthOpenIDConnectClaimSource as ClaimSource, - ConfigAuthOpenIDConnectClaims, - }, httpclient::logging_http_client, - jmespathext, util::{ FlexibleClient, FlexibleIdTokenClaims, FlexibleTokenResponse, FlexibleUserInfoClaims, LogOrFail, WantedMeta, }, }, - Auth, LoggedInUser, + session::*, + AuthInfo, LoggedInUser, Permission, }, config::Config, http::auth::{url_encode, AUTH_CALLBACK_ENDPOINT}, }, }; +use super::claims::Claims; +use super::config::ConfigAuthOpenIDConnect; + + +//------------ Constants ----------------------------------------------------- // On modern browsers (Chrome >= 51, Edge >= 16, Firefox >= 60 & Safari >= 12) // the "__Host" prefix is a defence-in-depth measure that causes the browser @@ -105,32 +94,9 @@ use crate::{ const NONCE_COOKIE_NAME: &str = "__Host-krill_login_nonce"; const CSRF_COOKIE_NAME: &str = "__Host-krill_login_csrf_hash"; -#[allow(clippy::enum_variant_names)] -enum TokenKind { - AccessToken, - RefreshToken, - IdToken, -} 
-impl From for String { - fn from(token_kind: TokenKind) -> Self { - match token_kind { - TokenKind::AccessToken => String::from("access_token"), - TokenKind::RefreshToken => String::from("refresh_token"), - TokenKind::IdToken => String::from("id_token"), - } - } -} +//------------ LogoutMode ---------------------------------------------------- -impl From for &'static str { - fn from(token_kind: TokenKind) -> Self { - match token_kind { - TokenKind::AccessToken => "access_token", - TokenKind::RefreshToken => "refresh_token", - TokenKind::IdToken => "id_token", - } - } -} enum LogoutMode { OAuth2TokenRevocation { revocation_url: String, @@ -148,6 +114,9 @@ enum LogoutMode { }, } + +//------------ ProivderConnectionProperties ---------------------------------- + pub struct ProviderConnectionProperties { client: FlexibleClient, email_scope_supported: bool, @@ -156,23 +125,62 @@ pub struct ProviderConnectionProperties { logout_mode: LogoutMode, } -pub struct OpenIDConnectAuthProvider { + +//------------ SessionSecrets ------------------------------------------------ + +#[derive(Clone, Debug, Deserialize, Serialize)] +struct SessionSecrets { + role: Arc, + access_token: String, + refresh_token: Option, + id_token: Option +} + +impl SessionSecrets { + fn new( + role: impl Into>, + token_response: &FlexibleTokenResponse, + ) -> Self { + Self { + role: role.into(), + access_token: token_response.access_token().secret().clone(), + refresh_token: token_response.refresh_token().as_ref().map(|t| { + t.secret().clone() + }), + id_token: { + token_response.extra_fields().id_token().as_ref().map(|t| { + t.to_string() + }) + }, + } + } +} + + +//------------ SessionCache -------------------------------------------------- + +type SessionCache = LoginSessionCache; +type Session = ClientSession; + + +//------------ AuthProvider -------------------------------------------------- + +pub struct AuthProvider { config: Arc, - session_cache: Arc, + session_cache: SessionCache, session_key: 
CryptState, conn: Arc>>, } -impl OpenIDConnectAuthProvider { +impl AuthProvider { pub fn new( config: Arc, - session_cache: Arc, ) -> KrillResult { let session_key = Self::init_session_key(&config)?; - Ok(OpenIDConnectAuthProvider { + Ok(Self { config, - session_cache, + session_cache: SessionCache::new(), session_key, conn: Arc::new(RwLock::new(None)), }) @@ -541,7 +549,7 @@ impl OpenIDConnectAuthProvider { async fn try_revoke_token( &self, - session: &ClientSession, + session: &Session, ) -> Result<(), RevocationErrorResponseType> { // Connect to the OpenID Connect provider OAuth 2.0 token revocation // endpoint to terminate the provider session @@ -550,22 +558,19 @@ impl OpenIDConnectAuthProvider { // and SHOULD support the revocation of access tokens (see // Implementation Note)." let token_to_revoke = if let Some(token) = - session.get_secret(TokenKind::RefreshToken.into()) + session.secrets.refresh_token.as_ref().cloned() { CoreRevocableToken::from(RefreshToken::new(token.clone())) - } else if let Some(token) = - session.get_secret(TokenKind::AccessToken.into()) - { - CoreRevocableToken::from(AccessToken::new(token.clone())) - } else { - return Err(RevocationErrorResponseType::Basic(CoreErrorResponseType::Extension( - "Internal error: Token revocation attempted without a token".to_string(), - ))); + } + else { + CoreRevocableToken::from( + AccessToken::new(session.secrets.access_token.clone()) + ) }; trace!( "OpenID Connect: Revoking token for user: \"{}\"", - &session.id + &session.user_id ); trace!("OpenID Connect: Submitting RFC-7009 section 2 Token Revocation request"); let lock_guard = self.get_connection().await.map_err(|err| { @@ -630,17 +635,19 @@ impl OpenIDConnectAuthProvider { /// logging and (optionally) retrying. 
async fn try_refresh_token( &self, - session: &ClientSession, - ) -> Result { - let refresh_token = &session.secrets.get(TokenKind::RefreshToken.into()).ok_or_else(|| { - CoreErrorResponseType::Extension( - "Internal error: Token refresh attempted without a refresh token".to_string(), - ) - })?; + session: &Session, + ) -> Result { + let refresh_token = + &session.secrets.refresh_token.as_ref().ok_or_else(|| { + CoreErrorResponseType::Extension( + "Internal error: Token refresh attempted without \ + a refresh token".to_string(), + ) + })?; debug!( "OpenID Connect: Refreshing token for user: \"{}\"", - &session.id + &session.user_id ); trace!("OpenID Connect: Submitting RFC-6749 section 6 Access Token Refresh request"); @@ -660,9 +667,11 @@ impl OpenIDConnectAuthProvider { match token_response { Ok(token_response) => { let new_token_res = self.session_cache.encode( - &session.id, - &session.attributes, - secrets_from_token_response(&token_response), + session.user_id.clone(), + SessionSecrets::new( + session.secrets.role.clone(), + &token_response + ), &self.session_key, token_response.expires_in(), ); @@ -671,7 +680,7 @@ impl OpenIDConnectAuthProvider { Ok(new_token) => { // The new token was successfully acquired from the OpenID Connect Provider, // and early returned. 
- Ok(Auth::Bearer(new_token)) + Ok(new_token) } Err(err) => Err(CoreErrorResponseType::Extension(format!( "Internal error: Error while encoding the refreshed token {}", @@ -730,156 +739,6 @@ impl OpenIDConnectAuthProvider { } } - fn extract_claim( - &self, - claim_conf: &ConfigAuthOpenIDConnectClaim, - id_token_claims: &FlexibleIdTokenClaims, - user_info_claims: Option<&FlexibleUserInfoClaims>, - ) -> KrillResult> { - let searchable_claims = match &claim_conf.source { - Some(ClaimSource::ConfigFile) => return Ok(None), - Some(ClaimSource::IdTokenStandardClaim) => { - Some(id_token_claims.to_jmespath()) - } - Some(ClaimSource::IdTokenAdditionalClaim) => { - Some(id_token_claims.additional_claims().to_jmespath()) - } - Some(ClaimSource::UserInfoStandardClaim) - if user_info_claims.is_some() => - { - Some(user_info_claims.unwrap().to_jmespath()) - } - Some(ClaimSource::UserInfoAdditionalClaim) - if user_info_claims.is_some() => - { - Some( - user_info_claims - .unwrap() - .additional_claims() - .to_jmespath(), - ) - } - _ => None, - }; - - // optional because it's not needed when looking up a value in the - // config file instead - let jmespath_string = claim_conf - .jmespath - .as_ref() - .ok_or_else(|| { - OpenIDConnectAuthProvider::internal_error( - "Missing JMESPath configuration value for claim", - None, - ) - })? - .to_string(); - - // Create a new JMESPath Runtime. TODO: Somehow make this a single - // persistent runtime to which API request handling threads (such as - // ours) dispatch search commands to be compiled and executed and - // which can receive results back. Perhaps with a pair of - // channels, one to to send search requests and the other to - // receive search results? 
- let runtime = jmespathext::init_runtime(); - - // We don't precompile the JMESPath expression because the jmespath - // crate requires it to have a lifetime and storing that in our state - // struct would infect the entire struct with lifetimes, plus logins - // don't happen very often and are slow anyway (as the user has to - // visit the OpenID Connect providers own login form then be - // redirected back to us) so this doesn't have to be fast. - // Note to self: perhaps the lifetime issue could be worked - // around using a Box? - let expr = &runtime.compile(&jmespath_string).map_err(|e| { - OpenIDConnectAuthProvider::internal_error( - format!( - "OpenID Connect: Unable to compile JMESPath expression '{}'", - &jmespath_string - ), - Some(stringify_cause_chain(e)), - ) - })?; - - let claims_to_search = match searchable_claims { - Some(claim) => vec![(claim_conf.source.as_ref().unwrap(), claim)], - None => { - let mut claims = vec![ - ( - &ClaimSource::IdTokenStandardClaim, - id_token_claims.to_jmespath(), - ), - ( - &ClaimSource::IdTokenAdditionalClaim, - id_token_claims.additional_claims().to_jmespath(), - ), - ]; - - if let Some(user_info_claims) = user_info_claims { - claims.extend(vec![ - ( - &ClaimSource::UserInfoStandardClaim, - user_info_claims.to_jmespath(), - ), - ( - &ClaimSource::UserInfoAdditionalClaim, - user_info_claims - .additional_claims() - .to_jmespath(), - ), - ]); - } - - claims - } - }; - - for (source, claims) in claims_to_search.clone() { - let claims = claims.map_err(|e| { - OpenIDConnectAuthProvider::internal_error( - "OpenID Connect: Unable to prepare claims for parsing", - Some(&stringify_cause_chain(e)), - ) - })?; - - debug!("Searching {:?} for \"{}\"..", source, &jmespath_string); - - let result = expr.search(&claims).map_err(|e| { - OpenIDConnectAuthProvider::internal_error( - "OpenID Connect: Error while searching claims", - Some(&stringify_cause_chain(e)), - ) - })?; - debug!("Search result in {:?}: '{:?}'", source, &result); - 
- // Did the JMESPath search find a match? - if !matches!(*result, jmespath::Variable::Null) { - // Yes. Is it a JMESPath String type? - if let Some(result_str) = result.as_string() { - // Yes. Is it non-empty after trimming leading and - // trailing whitespace? - if !result_str.trim().is_empty() { - // Yes - return Ok(Some(result_str.clone())); - } - } - } - } - - let err_msg_parts = &claims_to_search - .iter() - .map(|(source, claims)| format!("{} {:?}", source, claims)) - .collect::>() - .join(", "); - - debug!( - "Claim \"{}\" not found in {}", - &jmespath_string, err_msg_parts - ); - - Ok(None) - } - fn init_session_key(config: &Config) -> KrillResult { debug!("Initializing session encryption key"); crypt::crypt_init(config) @@ -977,12 +836,12 @@ impl OpenIDConnectAuthProvider { self.extract_cookie(request, CSRF_COOKIE_NAME) { trace!("OpenID Connect: Detected RFC-6749 section 4.1.2 redirected Authorization Response"); - return Some(Auth::authorization_code( - Token::from(code), + return Some(Auth { + code: Token::from(code), state, nonce, csrf_token_hash, - )); + }); } else { debug!("OpenID Connect: Ignoring potential RFC-6749 section 4.1.2 redirected Authorization Response due to missing CSRF token hash cookie."); } @@ -1005,7 +864,7 @@ impl OpenIDConnectAuthProvider { let conn_guard = self.conn.read().await; conn_guard.as_ref().ok_or_else(|| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( "Connection to provider not yet established", None, ) @@ -1121,7 +980,7 @@ impl OpenIDConnectAuthProvider { None => cause_chain_str, }; - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( format!("OpenID Connect: Code exchange failed: {}", msg), Some(additional_info), ) @@ -1155,14 +1014,14 @@ impl OpenIDConnectAuthProvider { .extra_fields() .id_token() .ok_or_else(|| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( "OpenID Connect: ID token is missing, does the provider support OpenID Connect?", None, ) })? 
// happens if the server only supports OAuth2 .claims(&id_token_verifier, &nonce_hash) .map_err(|e| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( format!("OpenID Connect: ID token verification failed: {}", e), Some(stringify_cause_chain(e)), ) @@ -1194,7 +1053,7 @@ impl OpenIDConnectAuthProvider { conn.client .user_info(token_response.access_token().clone(), None) .map_err(|e| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( "OpenID Connect: Provider has no user info endpoint", Some(&stringify_cause_chain(e)), ) @@ -1229,7 +1088,7 @@ impl OpenIDConnectAuthProvider { _ => "Unknown error".to_string(), }; - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( format!("OpenID Connect: UserInfo request failed: {}", msg), Some(stringify_cause_chain(e)), ) @@ -1242,84 +1101,25 @@ impl OpenIDConnectAuthProvider { Ok(user_info_claims) } - fn resolve_claims( - &self, - claims_conf: HashMap, - user: Option<&ConfigUserDetails>, - id_token_claims: &FlexibleIdTokenClaims, - user_info_claims: Option, - id: &str, - ) -> KrillResult> { - let mut attributes: HashMap = HashMap::new(); - for (attr_name, claim_conf) in claims_conf { - if attr_name == "id" { - continue; - } - let attr_value = match &claim_conf.source { - Some(ClaimSource::ConfigFile) if user.is_some() => { - // Lookup the claim value in the auth_users config file - // section - user.unwrap() - .attributes - .get(&attr_name.to_string()) - .cloned() - } - _ => self.extract_claim( - &claim_conf, - id_token_claims, - user_info_claims.as_ref(), - )?, - }; - - if let Some(attr_value) = attr_value { - // Apply any defined destination mapping for this claim. - // A destination causes the created attribute to have a - // different name than the claim key in the - // configuration. 
With this we can handle situations - // such as the extracted role value not matching a valid - // role according to policy (by specifying the same - // source claim field multiple times but each time - // using a different JMESPath expression to extract (and - // optionally transform) a different value each time, - // but mapping all of them to the same final attribute, - // e.g. 'role'. A similar case this addresses is where - // different values for an attribute (e.g. 'role') are - // not present in a single claim field but instead may - // be present in one of several claims (e.g. use (part - // of) claim A to check for admins but use (part of) - // claim B to check for readonly users). - let final_attr_name = match claim_conf.dest { - None => attr_name.to_string(), - Some(alt_attr_name) => alt_attr_name.to_string(), - }; - // Only use the first found value - match attributes.entry(final_attr_name.clone()) { - Occupied(found) => { - info!("Skipping found value '{}' for claim '{}' as attribute '{}': attribute already has a value: '{}'", - attr_value, attr_name, final_attr_name, found.get()); - } - Vacant(vacant) => { - debug!( - "Storing found value '{}' for claim '{}' as attribute '{}'", - attr_value, attr_name, final_attr_name - ); - vacant.insert(attr_value); - } - } - } else { - // With Oso policy based configuration the absence of - // claim values isn't necessarily a problem, it's very - // client configuration dependent, but let's mention - // that we didn't find anything just to make it easier - // to spot configuration mistakes via the logs. 
- info!("No '{}' claim found for user: {}", &attr_name, &id); - } - } - Ok(attributes) + fn auth_from_session( + &self, session: &Session + ) -> KrillResult { + Ok(AuthInfo::user( + session.user_id.clone(), + self.config.auth_roles.get(&session.secrets.role).ok_or_else(|| { + ApiAuthError::ApiAuthPermanentError( + format!( + "user '{}' with undefined role '{}' \ + not caught during login", + session.user_id, session.secrets.role, + ) + ) + })? + )) } } -impl OpenIDConnectAuthProvider { +impl AuthProvider { // Connect Core 1.0 section 3.1.26 Authentication Error Response // OAuth 2.0 RFC-674 4.1.2.1 (Authorization Request Errors) & 5.2 (Access // Token Request Errors) @@ -1334,11 +1134,11 @@ impl OpenIDConnectAuthProvider { pub async fn authenticate( &self, request: &HyperRequest, - ) -> KrillResult> { + ) -> Result, ApiAuthError> { trace!("Attempting to authenticate the request.."); self.initialize_connection_if_needed().await.map_err(|err| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( "OpenID Connect: Cannot authenticate request: Failed to connect to provider", Some(&stringify_cause_chain(err)), ) @@ -1359,25 +1159,14 @@ impl OpenIDConnectAuthProvider { // return match status { SessionStatus::Active => { - return Ok(Some(ActorDef::user( - session.id, - session.attributes, - None, - ))); + return Ok(Some(self.auth_from_session(&session)?)) } SessionStatus::NeedsRefresh => { // If we have a refresh token try and extend the // session. Otherwise return the cached token // and continue the login session until it expires. - if !session - .secrets - .contains_key(TokenKind::RefreshToken.into()) - { - return Ok(Some(ActorDef::user( - session.id, - session.attributes, - None, - ))); + if session.secrets.refresh_token.is_none() { + return Ok(Some(self.auth_from_session(&session)?)) } } SessionStatus::Expired => { @@ -1385,11 +1174,8 @@ impl OpenIDConnectAuthProvider { // refresh token. 
Otherwise, return early // with an error that indicates the user needs to // login again. - if !session - .secrets - .contains_key(TokenKind::RefreshToken.into()) - { - return Err(Error::ApiAuthSessionExpired( + if session.secrets.refresh_token.is_none() { + return Err(ApiAuthError::ApiAuthSessionExpired( "No token to be refreshed".to_string(), )); } @@ -1398,19 +1184,19 @@ impl OpenIDConnectAuthProvider { // Token needs refresh and we have a refresh token, try to // refresh - let new_auth = match self.try_refresh_token(&session).await { - Ok(auth) => { + let new_token = match self.try_refresh_token(&session).await { + Ok(token) => { trace!( "OpenID Connect: Successfully refreshed token for user \"{}\"", - &session.id + session.user_id ); - auth + token } Err(err) => { trace!("OpenID Connect: RFC 6749 5.2 Error response returned..."); debug!( "OpenID Connect: Refreshing the token for user '{}' failed: {}", - &session.id, &err + session.user_id, err ); match err { // This is the Error returned by the OpenID @@ -1422,7 +1208,7 @@ impl OpenIDConnectAuthProvider { "OpenID Connect: invalid_grant {:?}", err ); - return Err(Error::ApiInvalidCredentials( + return Err(ApiAuthError::ApiInvalidCredentials( "Unable to extend login session: your session has been terminated.".to_string(), )); } @@ -1432,7 +1218,7 @@ impl OpenIDConnectAuthProvider { "OpenID Connect: RFC 6749 5.2 {:?}", err ); - return Err(Error::ApiAuthPermanentError( + return Err(ApiAuthError::ApiAuthPermanentError( "Unable to extend login session: the provider rejected the request.".to_string(), )); } @@ -1448,7 +1234,7 @@ impl OpenIDConnectAuthProvider { "OpenID Connect: RFC 6749 5.2 {:?}", err ); - return Err(Error::ApiInsufficientRights( + return Err(ApiAuthError::ApiInsufficientRights( "Unable to extend login session: the authorization was revoked for this user, client or action.".to_string(), )); } @@ -1467,13 +1253,13 @@ impl OpenIDConnectAuthProvider { "temporarily_unavailable" | "server_error" => { 
warn!("OpenID Connect: RFC 6749 5.2 {:?}", err); - return Err(Error::ApiAuthTransientError( + return Err(ApiAuthError::ApiAuthTransientError( "Unable to extend login session: could not contact the provider".to_string(), )); } _ => { warn!("OpenID Connect: RFC 6749 5.2 unknown error {:?}", err); - return Err(Error::ApiAuthTransientError( + return Err(ApiAuthError::ApiAuthTransientError( "Unable to extend login session: unknown error".to_string(), )); } @@ -1483,11 +1269,9 @@ impl OpenIDConnectAuthProvider { } }; - Ok(Some(ActorDef::user( - session.id, - session.attributes, - Some(new_auth), - ))) + let mut auth = self.auth_from_session(&session)?; + auth.set_new_token(new_token); + Ok(Some(auth)) } _ => Ok(None), }; @@ -1525,7 +1309,7 @@ impl OpenIDConnectAuthProvider { // parties" self.initialize_connection_if_needed().await.map_err(|err| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( "OpenID Connect: Cannot get login URL: Failed to connect to provider", Some(&stringify_cause_chain(err)), ) @@ -1711,7 +1495,7 @@ impl OpenIDConnectAuthProvider { cookie_name, cookie_value ); HeaderValue::from_str(&cookie_str).map_err(|err| { - OpenIDConnectAuthProvider::internal_error( + AuthProvider::internal_error( format!( "Unable to construct HTTP cookie '{}' with value '{}'", cookie_name, cookie_value @@ -1741,7 +1525,7 @@ impl OpenIDConnectAuthProvider { request: &HyperRequest, ) -> KrillResult { self.initialize_connection_if_needed().await.map_err(|err| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( "OpenID Connect: Cannot login user: Failed to connect to provider", Some(&stringify_cause_chain(err)), ) @@ -1751,7 +1535,7 @@ impl OpenIDConnectAuthProvider { // OpenID Connect Authorization Code Flow // See: https://tools.ietf.org/html/rfc6749#section-4.1 // https://openid.net/specs/openid-connect-core-1_0.html#CodeFlowSteps - Some(Auth::AuthorizationCode { + Some(Auth { code, state, nonce, @@ -1874,49 +1658,47 @@ impl 
OpenIDConnectAuthProvider { // configuration without the "id" key :-) // ========================================================================================== - let claims_conf = - with_default_claims(&self.oidc_conf()?.claims); + let mut claims = Claims::new( + id_token_claims, user_info_claims + ); + let id = claims.extract_claims( + &self.oidc_conf()?.id_claims + )?.ok_or_else(|| { + Self::internal_error( + "OpenID Connect: cannot determine user ID.", + None + ) + })?; + let role_name = claims.extract_claims( + &self.oidc_conf()?.role_claims + )?.ok_or_else(|| { + Self::internal_error( + "OpenID Connect: cannot determine user's role.", + None + ) + })?; + let role = self.config.auth_roles.get( + &role_name + ).ok_or_else(|| { + let reason = format!( + "Login denied for user '{}': \ + user is assigned undefined role '{}'.", + id, role_name + ); + warn!("{}", reason); + Error::ApiInsufficientRights(reason) + })?; - let id_claim_conf = - claims_conf.get("id").ok_or_else(|| { - OpenIDConnectAuthProvider::internal_error( - "Missing 'id' claim configuration", - None, - ) - })?; - - let id = self - .extract_claim( - id_claim_conf, - id_token_claims, - user_info_claims.as_ref(), - )? - .ok_or_else(|| { - OpenIDConnectAuthProvider::internal_error( - "No value found for 'id' claim", - None, - ) - })?; - - // Lookup the a user in the config file authentication - // provider configuration by the id value that - // we just obtained, if present. Any claim - // configurations that refer to attributes of - // users configured in the config file will be looked up on - // this user. - let user = self - .config - .auth_users - .as_ref() - .and_then(|users| users.get(&id)); - - let attributes = self.resolve_claims( - claims_conf, - user, - id_token_claims, - user_info_claims, - &id, - )?; + // Step 4 1/2: Check that the user is allowed to log in. 
+ if !role.is_allowed(Permission::Login, None) { + let reason = format!( + "Login denied for user '{}': \ + User is not permitted to 'login'", + id, + ); + warn!("{}", reason); + return Err(Error::ApiInsufficientRights(reason)); + } // ========================================================================================== // Step 5: Respond to the user: access granted, or access @@ -1944,22 +1726,17 @@ impl OpenIDConnectAuthProvider { // so attempting to refresh an access token // after that much time would also fail. // ========================================================================================== - let api_token = self.session_cache.encode( - &id, - &attributes, - secrets_from_token_response(&token_response), + let token = self.session_cache.encode( + id.clone(), + SessionSecrets::new(role_name.clone(), &token_response), &self.session_key, token_response.expires_in(), )?; - Ok(LoggedInUser { - token: api_token, - id, - attributes, - }) + Ok(LoggedInUser::new(token, id, role_name)) } - _ => Err(Error::ApiInvalidCredentials( + None => Err(Error::ApiInvalidCredentials( "Request is not RFC-6749 section 4.1.2 compliant".to_string(), )), } @@ -2014,7 +1791,7 @@ impl OpenIDConnectAuthProvider { )?; // announce that the user requested to be logged out - info!("User logged out: {}", session.id); + info!("User logged out: {}", session.user_id); // perform the logout: @@ -2026,7 +1803,7 @@ impl OpenIDConnectAuthProvider { // there's no point trying to log the token out of the provider if // we know there's a problem with the provider self.initialize_connection_if_needed().await.map_err(|err| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( "OpenID Connect: Cannot logout with provider: Failed to connect to provider", Some(&stringify_cause_chain(err)), ) @@ -2043,10 +1820,10 @@ impl OpenIDConnectAuthProvider { .. 
} => { if let Err(err) = self.try_revoke_token(&session).await { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( format!( "Error while revoking token for user '{}'", - session.id + session.user_id ), Some(err.to_string()), ); @@ -2069,14 +1846,12 @@ impl OpenIDConnectAuthProvider { } => { trace!("OpenID Connect: Directing user to RP-Initiated Logout 1.0 compliant logout endpoint"); - let id_token = session.secrets.get(TokenKind::IdToken.into()); - - self.build_rpinitiated_logout_url(provider_url, post_logout_redirect_url, id_token) + self.build_rpinitiated_logout_url(provider_url, post_logout_redirect_url, session.secrets.id_token.as_ref()) .unwrap_or_else(|err| { - OpenIDConnectAuthProvider::internal_error( + Self::internal_error( format!( "Error while building OpenID Connect RP-Initiated Logout URL for user '{}'", - session.id + session.user_id ), Some(stringify_cause_chain(err)), ); @@ -2091,58 +1866,29 @@ impl OpenIDConnectAuthProvider { ); Ok(HttpResponse::text_no_cache(go_to_url.into())) } -} -fn secrets_from_token_response( - token_response: &FlexibleTokenResponse, -) -> HashMap { - let mut secrets: HashMap = HashMap::new(); + pub fn sweep(&self) -> KrillResult<()> { + self.session_cache.sweep() + } - secrets.insert( - TokenKind::AccessToken.into(), - token_response.access_token().secret().clone(), - ); + pub fn cache_size(&self) -> usize { + self.session_cache.size() + } +} - if let Some(refresh_token) = token_response.refresh_token() { - secrets.insert( - TokenKind::RefreshToken.into(), - refresh_token.secret().clone(), - ); - }; - if let Some(id_token) = token_response.extra_fields().id_token() { - secrets.insert(TokenKind::IdToken.into(), id_token.to_string()); - } +//------------ Auth ---------------------------------------------------------- - secrets +#[derive(Clone, Debug)] +struct Auth { + code: Token, + state: String, + nonce: String, + csrf_token_hash: String, } -fn with_default_claims( - claims: &Option, -) -> 
ConfigAuthOpenIDConnectClaims { - let mut claims = match claims { - Some(claims) => claims.clone(), - None => ConfigAuthOpenIDConnectClaims::new(), - }; - - claims - .entry("id".into()) - .or_insert(ConfigAuthOpenIDConnectClaim { - source: None, - jmespath: Some("email".to_string()), - dest: None, - }); - - claims - .entry("role".into()) - .or_insert(ConfigAuthOpenIDConnectClaim { - source: None, - jmespath: Some("role".to_string()), - dest: None, - }); - - claims -} + +//------------ Helper Functions ---------------------------------------------- // Based on: https://github.com/ramosbugs/openidconnect-rs/blob/main/examples/google.rs#L38 pub fn stringify_cause_chain(fail: F) -> String { @@ -2158,3 +1904,4 @@ pub fn stringify_cause_chain(fail: F) -> String { } cause_chain } + diff --git a/src/daemon/auth/roles.rs b/src/daemon/auth/roles.rs new file mode 100644 index 000000000..8137ed44d --- /dev/null +++ b/src/daemon/auth/roles.rs @@ -0,0 +1,201 @@ +//! Roles and related types. +//! +//! This is a private module. Its public items are re-exported by the parent. + +use std::collections::HashMap; +use std::sync::Arc; +use rpki::ca::idexchange::MyHandle; +use serde::Deserialize; +use super::{Permission, PermissionSet}; + + +//------------ Role ---------------------------------------------------------- + +/// A set of access permissions for resources. +/// +/// Roles provide an intermediary for assigning access permissions to users +/// by managing [permission sets][PermissionSet]. Separate sets can be +/// provided for specific resources, all other resources, and requests that +/// do not operate on resources. +/// +/// Currently, roles are given names and are defined in +/// [Config::auth_roles][crate::daemon::config::Config::auth_roles] and +/// referenced by authorization providers through those names. +#[derive(Clone, Debug, Deserialize, Eq, PartialEq)] +#[serde(from = "RoleConf")] +pub struct Role { + /// Permissions for requests without specific resources.
+ none: PermissionSet, + + /// Blanket permission for all resources. + /// + /// This is checked for any resource that isn’t included in + /// the `resources` field. + any: PermissionSet, + + /// Permissions for specific resources. + resources: HashMap, +} + +impl Role { + /// Creates the special admin role. + /// + /// This role allows all access to everything. + pub fn admin() -> Self { + Self::simple(PermissionSet::ANY) + } + + /// Creates the default read-write role. + /// + /// This role uses `PermissionSet::READWRITE` for everything. + pub fn readwrite() -> Self { + Self::simple(PermissionSet::READWRITE) + } + + /// Creates the default read-only role. + /// + /// This role uses `PermissionSet::READONLY` for everything. + pub fn readonly() -> Self { + Self::simple(PermissionSet::READONLY) + } + + /// Creates the special testbed role. + /// + /// This role uses `PermissionSet::TESTBED` for everything. + pub fn testbed() -> Self { + Self::simple(PermissionSet::TESTBED) + } + + /// Creates the anonymous special role. + /// + /// This role allows nothing. + pub fn anonymous() -> Self { + Self::simple(PermissionSet::NONE) + } + + /// Creates a role that uses the provided permission set for all access. + pub fn simple(permissions: PermissionSet) -> Self { + Self { + none: permissions, + any: permissions, + resources: Default::default() + } + } + + /// Creates a role that uses the provided set for the given resources. + /// + /// The role will allow access with the set to non-resource requests and + /// all resources provided. Access to all other resources will be denied. + pub fn with_resources( + permissions: PermissionSet, + resources: impl IntoIterator + ) -> Self { + Self { + none: permissions, + any: PermissionSet::NONE, + resources: resources.into_iter().map(|handle| { + (handle, permissions) + }).collect() + } + } + + /// Creates a complex role. + /// + /// The permission set `none` will be used for non-resource requests.
+ /// The `resources` hash map contains special permission sets for the + /// provided resources. The `any` set will be used for all resources + /// not mentioned in the hash map. + pub fn complex( + none: PermissionSet, + any: PermissionSet, + resources: HashMap + ) -> Self { + Self { none, any, resources } + } + + /// Returns whether access is allowed. + /// + /// The method checks whether the role allows access with the provided + /// `permission` to the provided `resource`. If the resource is `None`, + /// access for non-resource requests is checked. + /// + /// Returns `true` if access is allowed or `false` if not. + pub fn is_allowed( + &self, + permission: Permission, + resource: Option<&MyHandle> + ) -> bool { + match resource { + Some(resource) => { + match self.resources.get(resource) { + Some(permissions) => permissions.has(permission), + None => self.any.has(permission), + } + } + None => { + self.none.has(permission) + } + } + } +} + +impl From for Role { + fn from(src: RoleConf) -> Self { + match src.cas { + Some(cas) => Self::with_resources(src.permissions, cas), + None => Self::simple(src.permissions) + } + } +} + + +//------------ RoleConf ------------------------------------------------------ + +/// The role definition used in the config file. +/// +/// This currently only allows creation of a subset of the things that +/// [`Role`] supports. This is on purpose to keep the config format simple. +#[derive(Clone, Debug, Deserialize)] +struct RoleConf { + /// The permission set to use. + permissions: PermissionSet, + + /// An optional list of resources to limit access to. + /// + /// If this is `None`, access to all resources will be allowed. + cas: Option>, +} + + +//------------ RoleMap ------------------------------------------------------- + +/// A mapping storing roles under a name. +/// +/// Roles are stored behind an arc to allow users to keep the role around.
+#[derive(Clone, Debug, Default, Deserialize)] +pub struct RoleMap(HashMap>); + +impl RoleMap { + /// Creates a new, empty role map. + pub fn new() -> Self { + Self::default() + } + + /// Adds the given role. + pub fn add( + &mut self, name: impl Into, role: impl Into> + ) { + self.0.insert(name.into(), role.into()); + } + + /// Returns whether the map contains a role by the given name. + pub fn contains(&self, name: &str) -> bool { + self.0.contains_key(name) + } + + /// Returns the role of the given name if present. + pub fn get(&self, name: &str) -> Option> { + self.0.get(name).cloned() + } +} + diff --git a/src/daemon/auth/common/session.rs b/src/daemon/auth/session.rs similarity index 76% rename from src/daemon/auth/common/session.rs rename to src/daemon/auth/session.rs index 1a209cc90..e96035cb2 100644 --- a/src/daemon/auth/common/session.rs +++ b/src/daemon/auth/session.rs @@ -1,26 +1,26 @@ -use std::{ - collections::HashMap, - sync::RwLock, - time::{Duration, SystemTime, UNIX_EPOCH}, -}; - +use std::collections::HashMap; +use std::fmt::Debug; +use std::sync::{Arc, RwLock}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; use base64::engine::general_purpose::STANDARD as BASE64_ENGINE; use base64::engine::Engine as _; +use serde::{Deserialize, Serialize}; +use serde::de::DeserializeOwned; +use crate::commons::KrillResult; +use crate::commons::api::Token; +use crate::commons::error::{ApiAuthError, Error}; +use crate::daemon::auth::crypt; +use crate::daemon::auth::crypt::{CryptState, NonceState}; -use crate::{ - commons::{api::Token, error::Error, KrillResult}, - daemon::auth::common::crypt::{self, CryptState, NonceState}, -}; const MAX_CACHE_SECS: u64 = 30; -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ClientSession { +#[derive(Debug, Serialize, Deserialize)] +pub struct ClientSession { pub start_time: u64, pub expires_in: Option, - pub id: String, - pub attributes: HashMap, - pub secrets: HashMap, + pub user_id: Arc, + pub secrets: S, } 
#[derive(Debug, Eq, PartialEq)] @@ -30,7 +30,18 @@ pub enum SessionStatus { Expired, } -impl ClientSession { +impl Clone for ClientSession { + fn clone(&self) -> Self { + Self { + start_time: self.start_time, + expires_in: self.expires_in, + user_id: self.user_id.clone(), + secrets: self.secrets.clone(), + } + } +} + +impl ClientSession { pub fn status(&self) -> SessionStatus { if let Some(expires_in) = &self.expires_in { match SystemTime::now().duration_since(UNIX_EPOCH) { @@ -49,8 +60,8 @@ impl ClientSession { }; trace!( - "Login session status check: id={}, status={:?}, max age={} secs, cur age={} secs", - &self.id, + "Login session status check: user_id={}, status={:?}, max age={} secs, cur age={} secs", + &self.user_id, &status, max_age_secs, cur_age_secs @@ -69,39 +80,35 @@ impl ClientSession { SessionStatus::Active } - - pub fn get_secret(&self, key: &str) -> Option<&String> { - self.secrets.get(key) - } } -struct CachedSession { +struct CachedSession { pub evict_after: u64, - pub session: ClientSession, + pub session: ClientSession, } pub type EncryptFn = fn(&[u8], &[u8], &NonceState) -> KrillResult>; -pub type DecryptFn = fn(&[u8], &[u8]) -> KrillResult>; +pub type DecryptFn = fn(&[u8], &[u8]) -> Result, ApiAuthError>; /// A short term cache to reduce the impact of session token decryption and /// deserialization (e.g. for multiple requests in a short space of time by /// the Lagosta UI client) while keeping potentially sensitive data in-memory /// for as short as possible. This cache is NOT responsible for enforcing /// token expiration, that is handled separately by the AuthProvider. 
-pub struct LoginSessionCache { - cache: RwLock>, +pub struct LoginSessionCache { + cache: RwLock>>, encrypt_fn: EncryptFn, decrypt_fn: DecryptFn, ttl_secs: u64, } -impl Default for LoginSessionCache { +impl Default for LoginSessionCache { fn default() -> Self { Self::new() } } -impl LoginSessionCache { +impl LoginSessionCache { pub fn new() -> Self { LoginSessionCache { cache: RwLock::new(HashMap::new()), @@ -111,33 +118,6 @@ impl LoginSessionCache { } } - pub fn with_ttl(self, ttl_secs: u64) -> Self { - LoginSessionCache { - cache: self.cache, - encrypt_fn: self.encrypt_fn, - decrypt_fn: self.decrypt_fn, - ttl_secs, - } - } - - pub fn with_encrypter(self, encrypt_fn: EncryptFn) -> Self { - LoginSessionCache { - cache: self.cache, - encrypt_fn, - decrypt_fn: self.decrypt_fn, - ttl_secs: self.ttl_secs, - } - } - - pub fn with_decrypter(self, decrypt_fn: DecryptFn) -> Self { - LoginSessionCache { - cache: self.cache, - encrypt_fn: self.encrypt_fn, - decrypt_fn, - ttl_secs: self.ttl_secs, - } - } - fn time_now_secs_since_epoch() -> KrillResult { Ok(SystemTime::now() .duration_since(UNIX_EPOCH) @@ -150,7 +130,8 @@ impl LoginSessionCache { .as_secs()) } - fn lookup_session(&self, token: &Token) -> Option { + fn lookup_session(&self, token: &Token) -> Option> + where S: Clone { match self.cache.read() { Ok(readable_cache) => { if let Some(cache_item) = readable_cache.get(token) { @@ -163,7 +144,7 @@ impl LoginSessionCache { None } - fn cache_session(&self, token: &Token, session: &ClientSession) { + fn cache_session(&self, token: &Token, session: ClientSession) { match self.cache.write() { Ok(mut writeable_cache) => { match Self::time_now_secs_since_epoch() { @@ -172,7 +153,7 @@ impl LoginSessionCache { token.clone(), CachedSession { evict_after: now + self.ttl_secs, - session: session.clone(), + session, }, ); } @@ -190,18 +171,17 @@ impl LoginSessionCache { pub fn encode( &self, - id: &str, - attributes: &HashMap, - secrets: HashMap, + user_id: Arc, + secrets: S, 
crypt_state: &CryptState, expires_in: Option, - ) -> KrillResult { + ) -> KrillResult + where S: Debug + Serialize { let session = ClientSession { start_time: Self::time_now_secs_since_epoch()?, expires_in, - id: id.to_string(), - attributes: attributes.clone(), - secrets, + user_id, + secrets }; debug!("Creating token for session: {:?}", &session); @@ -222,7 +202,7 @@ impl LoginSessionCache { )?; let token = Token::from(BASE64_ENGINE.encode(encrypted_bytes)); - self.cache_session(&token, &session); + self.cache_session(&token, session); Ok(token) } @@ -231,9 +211,10 @@ impl LoginSessionCache { token: Token, key: &CryptState, add_to_cache: bool, - ) -> KrillResult { + ) -> Result, ApiAuthError> + where S: Clone + DeserializeOwned { if let Some(session) = self.lookup_session(&token) { - trace!("Session cache hit for session id {}", &session.id); + trace!("Session cache hit for session id {}", &session.user_id); return Ok(session); } else { trace!("Session cache miss, deserializing..."); @@ -242,7 +223,7 @@ impl LoginSessionCache { let bytes = BASE64_ENGINE.decode(token.as_ref().as_bytes()).map_err( |err| { debug!("Invalid bearer token: cannot decode: {}", err); - Error::ApiInvalidCredentials( + ApiAuthError::ApiInvalidCredentials( "Invalid bearer token".to_string(), ) }, @@ -251,24 +232,24 @@ impl LoginSessionCache { let unencrypted_bytes = (self.decrypt_fn)(&key.key, &bytes)?; let session = - serde_json::from_slice::(&unencrypted_bytes) + serde_json::from_slice::>(&unencrypted_bytes) .map_err(|err| { debug!( "Invalid bearer token: cannot deserialize: {}", err ); - Error::ApiInvalidCredentials( + ApiAuthError::ApiInvalidCredentials( "Invalid bearer token".to_string(), ) })?; trace!( "Session cache miss, deserialized session id {}", - &session.id + &session.user_id ); if add_to_cache { - self.cache_session(&token, &session); + self.cache_session(&token, session.clone()); } Ok(session) @@ -318,6 +299,7 @@ impl LoginSessionCache { } } + mod tests { #[test] fn 
basic_login_session_cache_test() { @@ -334,20 +316,20 @@ mod tests { // Create a new cache whose items are elligible for eviction after one // second and which does no actual encryption or decryption. - let cache = LoginSessionCache::new() - .with_ttl(1) - .with_encrypter(|_, v, _| Ok(v.to_vec())) - .with_decrypter(|_, v| Ok(v.to_vec())); + let mut cache = LoginSessionCache::new(); + cache.ttl_secs = 1; + cache.encrypt_fn = |_, v, _| Ok(v.to_vec()); + cache.decrypt_fn = |_, v| Ok(v.to_vec()); + let cache = cache; // Add an item to the cache and verify that the cache now has 1 item let item1_token = cache - .encode("some id", &HashMap::new(), HashMap::new(), &key, None) + .encode("some id".into(), HashMap::new(), &key, None) .unwrap(); assert_eq!(cache.size(), 1); let item1 = cache.decode(item1_token, &key, true).unwrap(); - assert_eq!(item1.id, "some id"); - assert_eq!(item1.attributes, HashMap::new()); + assert_eq!(item1.user_id.as_ref(), "some id"); assert_eq!(item1.expires_in, None); assert_eq!(item1.secrets, HashMap::new()); @@ -358,12 +340,10 @@ mod tests { assert_eq!(cache.size(), 1); // Add another item to the cache - let some_attrs = one_attr_map("some attr key", "some attr val"); let some_secrets = one_attr_map("some secret key", "some secret val"); let item2_token = cache .encode( - "other id", - &some_attrs, + "other id".into(), some_secrets, &key, Some(Duration::from_secs(10)), @@ -383,11 +363,7 @@ mod tests { assert_eq!(cache.size(), 1); let item2 = cache.decode(item2_token, &key, true).unwrap(); - assert_eq!(item2.id, "other id"); - assert_eq!( - item2.attributes, - one_attr_map("some attr key", "some attr val") - ); + assert_eq!(item2.user_id.as_ref(), "other id"); assert_eq!(item2.expires_in, Some(Duration::from_secs(10))); assert_eq!( item2.secrets, diff --git a/src/daemon/ca/manager.rs b/src/daemon/ca/manager.rs index e1cfc5b6b..b22ba1cdf 100644 --- a/src/daemon/ca/manager.rs +++ b/src/daemon/ca/manager.rs @@ -48,8 +48,7 @@ use crate::{ 
CASERVER_NS, STATUS_NS, TA_PROXY_SERVER_NS, TA_SIGNER_SERVER_NS, }, daemon::{ - auth::common::permissions::Permission, - auth::Handle, + auth::{AuthInfo, Permission}, ca::{ CaObjectsStore, CaStatus, CertAuth, CertAuthCommand, CertAuthCommandDetails, DeprecatedRepository, @@ -611,20 +610,23 @@ impl CaManager { Ok(()) } - /// Get the CAs that the given actor is permitted to see. - pub fn ca_list(&self, actor: &Actor) -> KrillResult { + /// Returns all known CA handles. + pub fn ca_handles(&self) -> KrillResult> { + Ok(self.ca_store.list()?) + } + + /// Gets the CAs that the given policy allows read access to. + pub fn ca_list( + &self, auth: &AuthInfo, + ) -> KrillResult { Ok(CertAuthList::new( self.ca_store .list()? .into_iter() .filter(|handle| { - matches!( - actor.is_allowed( - Permission::CA_READ, - Handle::from(handle) - ), - Ok(true) - ) + auth.check_permission( + Permission::CaRead, Some(handle) + ).is_ok() }) .map(CertAuthSummary::new) .collect(), @@ -2460,9 +2462,9 @@ impl CaManager { /// Schedule synchronizing all CAs with their repositories. pub fn cas_schedule_repo_sync_all( &self, - actor: &Actor, + auth: &AuthInfo, ) -> KrillResult<()> { - for ca in self.ca_list(actor)?.cas() { + for ca in self.ca_list(auth)?.cas() { self.cas_schedule_repo_sync(ca.handle().clone())?; } Ok(()) @@ -3056,7 +3058,9 @@ impl CaManager { /// Note: this does not re-issue issued CA certificates, because child /// CAs are expected to note extended validity eligibility and request /// updated certificates themselves. - pub async fn renew_objects_all(&self, actor: &Actor) -> KrillResult<()> { + pub async fn renew_objects_all( + &self, actor: &Actor + ) -> KrillResult<()> { for ca in self.ca_store.list()? 
{ let cmd = CertAuthCommand::new( &ca, diff --git a/src/daemon/config.rs b/src/daemon/config.rs index 26a045761..ad525d642 100644 --- a/src/daemon/config.rs +++ b/src/daemon/config.rs @@ -5,6 +5,7 @@ use std::{ net::{IpAddr, Ipv4Addr, SocketAddr}, path::{Path, PathBuf}, str::FromStr, + sync::Arc, }; use chrono::Duration; @@ -32,6 +33,7 @@ use crate::{ }, constants::*, daemon::{ + auth::{Role, RoleMap}, http::tls_keys::{self, HTTPS_SUB_DIR}, mq::{in_seconds, Priority}, }, @@ -40,7 +42,7 @@ use crate::{ #[cfg(feature = "multi-user")] use crate::daemon::auth::providers::{ - config_file::config::ConfigAuthUsers, + config_file::ConfigAuthUsers, openid_connect::ConfigAuthOpenIDConnect, }; @@ -119,6 +121,14 @@ impl ConfigDefaults { AuthType::AdminToken } + pub fn auth_roles() -> Arc { + let mut res = RoleMap::new(); + res.add("admin", Role::admin()); + res.add("readwrite", Role::readwrite()); + res.add("readonly", Role::readonly()); + res.into() + } + pub fn admin_token() -> Token { match env::var(KRILL_ENV_ADMIN_TOKEN) { Ok(token) => Token::from(token), @@ -132,16 +142,6 @@ impl ConfigDefaults { } } - #[cfg(feature = "multi-user")] - pub fn auth_policies() -> Vec { - vec![] - } - - #[cfg(feature = "multi-user")] - pub fn auth_private_attributes() -> Vec { - vec![] - } - pub fn ca_refresh_seconds() -> u32 { 24 * 3600 // 24 hours } @@ -554,20 +554,15 @@ pub struct Config { #[serde(default = "ConfigDefaults::auth_type")] pub auth_type: AuthType, - #[cfg(feature = "multi-user")] - #[serde(default = "ConfigDefaults::auth_policies")] - pub auth_policies: Vec, - - #[cfg(feature = "multi-user")] - #[serde(default = "ConfigDefaults::auth_private_attributes")] - pub auth_private_attributes: Vec, - #[cfg(feature = "multi-user")] pub auth_users: Option, #[cfg(feature = "multi-user")] pub auth_openidconnect: Option, + #[serde(default = "ConfigDefaults::auth_roles")] + pub auth_roles: Arc, + #[serde(default, deserialize_with = "deserialize_signer_ref")] pub default_signer: 
SignerReference, @@ -1119,13 +1114,10 @@ impl Config { let auth_type = AuthType::AdminToken; let admin_token = Token::from("secret"); #[cfg(feature = "multi-user")] - let auth_policies = vec![]; - #[cfg(feature = "multi-user")] - let auth_private_attributes = vec![]; - #[cfg(feature = "multi-user")] let auth_users = None; #[cfg(feature = "multi-user")] let auth_openidconnect = None; + let auth_roles = ConfigDefaults::auth_roles(); let default_signer = SignerReference::default(); let one_off_signer = SignerReference::default(); @@ -1248,13 +1240,10 @@ impl Config { admin_token, auth_type, #[cfg(feature = "multi-user")] - auth_policies, - #[cfg(feature = "multi-user")] - auth_private_attributes, - #[cfg(feature = "multi-user")] auth_users, #[cfg(feature = "multi-user")] auth_openidconnect, + auth_roles, default_signer, one_off_signer, signers, diff --git a/src/daemon/http/auth.rs b/src/daemon/http/auth.rs index b3f123b3f..e2f91230a 100644 --- a/src/daemon/http/auth.rs +++ b/src/daemon/http/auth.rs @@ -8,7 +8,8 @@ use crate::{ #[cfg(feature = "multi-user")] use { crate::daemon::{ - auth::LoggedInUser, http::server::render_error_redirect, + auth::LoggedInUser, + http::server::render_error_redirect, }, urlparse::quote, }; @@ -24,10 +25,8 @@ pub fn url_encode>(s: S) -> Result { #[cfg(feature = "multi-user")] fn build_auth_redirect_location(user: LoggedInUser) -> Result { - use std::collections::HashMap; - fn b64_encode_attributes_with_mapped_error( - a: &HashMap, + a: &impl serde::Serialize, ) -> Result { use base64::engine::general_purpose::STANDARD as BASE64_ENGINE; use base64::engine::Engine as _; @@ -38,14 +37,15 @@ fn build_auth_redirect_location(user: LoggedInUser) -> Result { )) } - let attributes = - b64_encode_attributes_with_mapped_error(&user.attributes)?; + let attributes = b64_encode_attributes_with_mapped_error( + user.attributes() + )?; Ok(format!( "/ui/login?token={}&id={}&attributes={}", - &url_encode(user.token)?, - &url_encode(user.id)?, - 
&url_encode(attributes)? + &url_encode(user.token())?, + &url_encode(user.id())?, + &url_encode(attributes)?, )) } diff --git a/src/daemon/http/mod.rs b/src/daemon/http/mod.rs index 94905a5a7..a910c199a 100644 --- a/src/daemon/http/mod.rs +++ b/src/daemon/http/mod.rs @@ -1,24 +1,26 @@ -use std::{io, str::from_utf8, str::FromStr}; - +use std::io; +use std::str::FromStr; +use std::str::from_utf8; use bytes::Bytes; -use serde::{de::DeserializeOwned, Serialize}; - use http_body_util::{BodyExt, Either, Empty, Full, Limited}; use hyper::body::Body; use hyper::header::USER_AGENT; use hyper::http::uri::PathAndQuery; use hyper::{HeaderMap, Method, StatusCode}; - use rpki::ca::{provisioning, publication}; +use rpki::ca::idexchange::MyHandle; +use serde::Serialize; +use serde::de::DeserializeOwned; +use crate::daemon::auth::{AuthInfo, LoggedInUser, Permission}; use crate::{ commons::{ - actor::{Actor, ActorDef}, - error::Error, + actor::Actor, + error::{ApiAuthError, Error}, KrillResult, }, constants::HTTP_USER_AGENT_TRUNCATE, - daemon::{auth::LoggedInUser, http::server::State}, + daemon::http::server::State, }; pub mod auth; @@ -340,8 +342,8 @@ impl HttpResponse { Self::response_from_error(Error::ApiInvalidCredentials(reason)) } - pub fn forbidden(reason: String) -> Self { - Self::response_from_error(Error::ApiInsufficientRights(reason)) + pub fn forbidden(err: String) -> Self { + Self::response_from_error(Error::ApiInsufficientRights(err)) } } @@ -351,19 +353,19 @@ pub struct Request { request: HyperRequest, path: RequestPath, state: State, - actor: Actor, + auth: AuthInfo, } impl Request { pub async fn new(request: HyperRequest, state: State) -> Self { let path = RequestPath::from_request(&request); - let actor = state.actor_from_request(&request).await; + let auth = state.authenticate_request(&request).await; Request { request, path, state, - actor, + auth, } } @@ -387,18 +389,35 @@ impl Request { } } - pub async fn upgrade_from_anonymous(&mut self, actor_def: ActorDef) { 
- if self.actor.is_anonymous() { - self.actor = self.state.actor_from_def(actor_def); + pub async fn upgrade_from_anonymous(&mut self, auth: AuthInfo) { + if self.auth.actor().is_anonymous() { + self.auth = auth; info!( - "Permitted anonymous actor to become actor '{}' for the duration of this request", - self.actor.name() + "Permitted anonymous actor to become actor '{}' \ + for the duration of this request", + self.auth.actor().name() ); } } + pub fn check_permission( + &self, + permission: Permission, + resource: Option<&MyHandle> + ) -> Result<(), ApiAuthError> { + self.auth.check_permission(permission, resource) + } + pub fn actor(&self) -> Actor { - self.actor.clone() + self.auth.actor().clone() + } + + pub fn auth_info(&self) -> &AuthInfo { + &self.auth + } + + pub fn auth_info_mut(&mut self) -> &mut AuthInfo { + &mut self.auth } /// Returns the complete path. diff --git a/src/daemon/http/server.rs b/src/daemon/http/server.rs index a6c73c305..1f0f76c67 100644 --- a/src/daemon/http/server.rs +++ b/src/daemon/http/server.rs @@ -18,7 +18,7 @@ use hyper::Method; use hyper_util::rt::{TokioExecutor, TokioIo}; use rpki::ca::idexchange; use rpki::ca::idexchange::{ - CaHandle, ChildHandle, ParentHandle, PublisherHandle, + CaHandle, ChildHandle, MyHandle, ParentHandle, PublisherHandle, }; use rpki::repository::resources::Asn; use serde::Serialize; @@ -41,11 +41,10 @@ use crate::{ }, constants::{ KRILL_ENV_HTTP_LOG_INFO, KRILL_ENV_UPGRADE_ONLY, KRILL_VERSION_MAJOR, - KRILL_VERSION_MINOR, KRILL_VERSION_PATCH, NO_RESOURCE, + KRILL_VERSION_MINOR, KRILL_VERSION_PATCH, }, daemon::{ - auth::common::permissions::Permission, - auth::{Auth, Handle}, + auth::Permission, ca::CaStatus, config::Config, http::{ @@ -346,11 +345,11 @@ async fn map_requests( ) -> Result { let logger = RequestLogger::begin(&req); - let req = Request::new(req, state).await; + let mut req = Request::new(req, state).await; // Save any updated auth details, e.g. 
if an OpenID Connect token needed // refreshing. - let new_auth = req.actor().new_auth(); + let new_token = req.auth_info_mut().take_new_token(); // We used to use .or_else() here but that causes a large recursive call // tree due to these calls being to async functions, large enough with the @@ -402,7 +401,7 @@ async fn map_requests( // Augment the response with any updated auth details that were determined // above. - let res = add_new_auth_to_response(res, new_auth); + let res = add_new_token_to_response(res, new_token); // Log the request and the response. logger.end(res.as_ref()); @@ -1171,11 +1170,11 @@ fn add_authorization_headers_to_response( } } -fn add_new_auth_to_response( +fn add_new_token_to_response( res: Result, - opt_auth: Option, + opt_token: Option, ) -> Result { - if let Some(Auth::Bearer(token)) = opt_auth { + if let Some(token) = opt_token { res.map(|ok_res| add_authorization_headers_to_response(ok_res, token)) } else { res @@ -1197,42 +1196,26 @@ fn add_new_auth_to_response( // similar to how this macro is used in each function. macro_rules! 
aa { (no_warn $req:ident, $perm:expr, $action:expr) => {{ - aa!($req, $perm, NO_RESOURCE, $action, true) + aa!($req, $perm, Option::<&MyHandle>::None, $action, true) }}; ($req:ident, $perm:expr, $action:expr) => {{ - aa!($req, $perm, NO_RESOURCE, $action, false) + aa!($req, $perm, Option::<&MyHandle>::None, $action, false) }}; (no_warn $req:ident, $perm:expr, $resource:expr, $action:expr) => {{ - aa!($req, $perm, $resource, $action, true) + aa!($req, $perm, Some(&$resource), $action, true) }}; ($req:ident, $perm:expr, $resource:expr, $action:expr) => {{ - aa!($req, $perm, $resource, $action, false) + aa!($req, $perm, Some(&$resource), $action, false) }}; ($req:ident, $perm:expr, $resource:expr, $action:expr, $benign:expr) => {{ - match $req.actor().is_allowed($perm, $resource) { - Ok(true) => $action, - Ok(false) => { - let msg = format!( - "User '{}' does not have permission '{}' on resource '{}'", - $req.actor().name(), - $perm, - $resource - ); - Ok(HttpResponse::forbidden(msg).with_benign($benign)) - } + match $req.check_permission($perm, $resource) { + Ok(()) => { $action } Err(err) => { - // Avoid an extra round of error -> string -> error conversion - // which causes the error message to nest, e.g. 
- // "Invalid credentials: Invalid credentials: Session expired" - match err { - Error::ApiInvalidCredentials(_) - | Error::ApiInsufficientRights(_) - | Error::ApiAuthPermanentError(_) - | Error::ApiAuthTransientError(_) - | Error::ApiAuthSessionExpired(_) - | Error::ApiLoginError(_) => Ok(HttpResponse::response_from_error(err).with_benign($benign)), - _ => Ok(HttpResponse::forbidden(format!("{}", err)).with_benign($benign)), - } + Ok( + HttpResponse::forbidden( + err.to_string() + ).with_benign($benign) + ) } } }}; @@ -1251,18 +1234,18 @@ async fn api(req: Request) -> RoutingResult { Some("authorized") => api_authorized(req).await, restricted_endpoint => { // Make sure access is allowed - aa!(req, Permission::LOGIN, { + aa!(req, Permission::Login, { match restricted_endpoint { Some("bulk") => api_bulk(req, &mut path).await, Some("cas") => api_cas(req, &mut path).await, Some("pubd") => aa!( req, - Permission::PUB_ADMIN, + Permission::PubAdmin, api_publication_server(req, &mut path).await ), Some("ta") => aa!( req, - Permission::CA_ADMIN, + Permission::CaAdmin, api_ta(req, &mut path).await ), _ => render_unknown_method(), @@ -1280,7 +1263,7 @@ async fn api_authorized(req: Request) -> RoutingResult { // triggers Lagosta to show a login form, not something to warn about! 
aa!(no_warn req, - Permission::LOGIN, + Permission::Login, match *req.method() { Method::GET => render_ok(), _ => render_unknown_method(), @@ -1305,7 +1288,7 @@ async fn api_bulk(req: Request, path: &mut RequestPath) -> RoutingResult { async fn api_cas(req: Request, path: &mut RequestPath) -> RoutingResult { match path.path_arg::() { - Some(ca) => aa!(req, Permission::CA_READ, Handle::from(&ca), { + Some(ca) => aa!(req, Permission::CaRead, ca, { match path.next() { None => match *req.method() { Method::GET => api_ca_info(req, ca).await, @@ -1434,7 +1417,7 @@ async fn api_ca_sync( path: &mut RequestPath, ca: CaHandle, ) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { if req.is_post() { match path.next() { Some("parents") => { @@ -1531,7 +1514,7 @@ pub async fn api_stale_publishers( req: Request, seconds: Option<&str>, ) -> RoutingResult { - aa!(req, Permission::PUB_LIST, { + aa!(req, Permission::PubList, { let seconds = seconds.unwrap_or(""); match i64::from_str(seconds) { Ok(seconds) => { @@ -1546,7 +1529,7 @@ pub async fn api_stale_publishers( /// Returns a json structure with all publishers in it. 
pub async fn api_list_pbl(req: Request) -> RoutingResult { - aa!(req, Permission::PUB_LIST, { + aa!(req, Permission::PubList, { render_json_res( req.state() .publishers() @@ -1557,7 +1540,7 @@ pub async fn api_list_pbl(req: Request) -> RoutingResult { /// Adds a publisher pub async fn api_add_pbl(req: Request) -> RoutingResult { - aa!(req, Permission::PUB_CREATE, { + aa!(req, Permission::PubCreate, { let actor = req.actor(); let server = req.state().clone(); match req.json().await { @@ -1574,7 +1557,7 @@ pub async fn api_remove_pbl( req: Request, publisher: PublisherHandle, ) -> RoutingResult { - aa!(req, Permission::PUB_DELETE, { + aa!(req, Permission::PubDelete, { let actor = req.actor(); render_empty_res(req.state().remove_publisher(publisher, &actor)) }) @@ -1588,7 +1571,7 @@ pub async fn api_show_pbl( ) -> RoutingResult { aa!( req, - Permission::PUB_READ, + Permission::PubRead, render_json_res(req.state().get_publisher(&publisher)) ) } @@ -1601,7 +1584,7 @@ pub async fn api_repository_response_xml( req: Request, publisher: PublisherHandle, ) -> RoutingResult { - aa!(req, Permission::PUB_READ, { + aa!(req, Permission::PubRead, { match repository_response(&req, &publisher).await { Ok(repository_response) => { Ok(HttpResponse::xml(repository_response.to_xml_vec())) @@ -1616,7 +1599,7 @@ pub async fn api_repository_response_json( req: Request, publisher: PublisherHandle, ) -> RoutingResult { - aa!(req, Permission::PUB_READ, { + aa!(req, Permission::PubRead, { match repository_response(&req, &publisher).await { Ok(res) => render_json(res), Err(e) => render_error(e), @@ -1632,7 +1615,7 @@ async fn repository_response( } pub async fn api_ca_add_child(req: Request, ca: CaHandle) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); let server = req.state().clone(); match req.json().await { @@ -1649,7 +1632,7 @@ async fn api_ca_child_update( ca: CaHandle, child: ChildHandle, ) -> 
RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); let server = req.state().clone(); match req.json().await { @@ -1666,7 +1649,7 @@ pub async fn api_ca_child_remove( ca: CaHandle, child: ChildHandle, ) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); render_empty_res( req.state().ca_child_remove(&ca, child, &actor).await, @@ -1681,8 +1664,8 @@ async fn api_ca_child_show( ) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res(req.state().ca_child_show(&ca, &child).await) ) } @@ -1694,14 +1677,14 @@ async fn api_ca_child_export( ) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res(req.state().api_ca_child_export(&ca, &child).await) ) } async fn api_ca_child_import(req: Request, ca: CaHandle) -> RoutingResult { - aa!(req, Permission::CA_ADMIN, Handle::from(&ca), { + aa!(req, Permission::CaAdmin, ca, { let actor = req.actor(); let server = req.state().clone(); match req.json().await { @@ -1719,8 +1702,8 @@ async fn api_ca_stats_child_connections( ) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res(req.state().ca_stats_child_connections(&ca).await) ) } @@ -1732,8 +1715,8 @@ async fn api_ca_parent_res_json( ) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res( req.state().ca_parent_response(&ca, child.clone()).await ) @@ -1745,7 +1728,7 @@ pub async fn api_ca_parent_res_xml( ca: CaHandle, child: ChildHandle, ) -> RoutingResult { - aa!(req, Permission::CA_READ, Handle::from(&ca), { + aa!(req, Permission::CaRead, ca, { match req.state().ca_parent_response(&ca, child.clone()).await { Ok(res) => Ok(HttpResponse::xml(res.to_xml_vec())), Err(e) => 
render_error(e), @@ -1757,7 +1740,7 @@ pub async fn api_ca_parent_res_xml( async fn api_cas_import(req: Request) -> RoutingResult { match *req.method() { - Method::POST => aa!(req, Permission::CA_ADMIN, { + Method::POST => aa!(req, Permission::CaAdmin, { let server = req.state().clone(); match req.json().await { Ok(structure) => { @@ -1772,9 +1755,10 @@ async fn api_cas_import(req: Request) -> RoutingResult { async fn api_all_ca_issues(req: Request) -> RoutingResult { match *req.method() { - Method::GET => aa!(req, Permission::CA_READ, { - let actor = req.actor(); - render_json_res(req.state().all_ca_issues(&actor).await) + Method::GET => aa!(req, Permission::CaRead, { + render_json_res( + req.state().all_ca_issues(req.auth_info()).await + ) }), _ => render_unknown_method(), } @@ -1785,8 +1769,8 @@ async fn api_ca_issues(req: Request, ca: CaHandle) -> RoutingResult { match *req.method() { Method::GET => aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res(req.state().ca_issues(&ca).await) ), _ => render_unknown_method(), @@ -1794,14 +1778,13 @@ async fn api_ca_issues(req: Request, ca: CaHandle) -> RoutingResult { } async fn api_cas_list(req: Request) -> RoutingResult { - aa!(req, Permission::CA_LIST, { - let actor = req.actor(); - render_json_res(req.state().ca_list(&actor)) + aa!(req, Permission::CaList, { + render_json_res(req.state().ca_list(req.auth_info())) }) } pub async fn api_ca_init(req: Request) -> RoutingResult { - aa!(req, Permission::CA_CREATE, { + aa!(req, Permission::CaCreate, { let state = req.state().clone(); match req.json().await { @@ -1817,7 +1800,7 @@ async fn api_ca_id( ca: CaHandle, ) -> RoutingResult { match *req.method() { - Method::POST => aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + Method::POST => aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); render_empty_res(req.state().ca_update_id(ca, &actor).await) }), @@ -1841,8 +1824,8 @@ async fn api_ca_id( async fn 
api_ca_info(req: Request, handle: CaHandle) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&handle), + Permission::CaRead, + handle, render_json_res(req.state().ca_info(&handle).await) ) } @@ -1851,8 +1834,8 @@ async fn api_ca_delete(req: Request, handle: CaHandle) -> RoutingResult { let actor = req.actor(); aa!( req, - Permission::CA_DELETE, - Handle::from(&handle), + Permission::CaDelete, + handle, render_json_res(req.state().ca_delete(&handle, &actor).await) ) } @@ -1864,8 +1847,8 @@ async fn api_ca_my_parent_contact( ) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res(req.state().ca_my_parent_contact(&ca, &parent).await) ) } @@ -1876,8 +1859,8 @@ async fn api_ca_my_parent_statuses( ) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res( req.state() .ca_status(&ca) @@ -1947,7 +1930,7 @@ async fn api_ca_bgpsec_definitions_show( req: Request, ca: CaHandle, ) -> RoutingResult { - aa!(req, Permission::BGPSEC_READ, Handle::from(&ca), { + aa!(req, Permission::BgpsecRead, ca, { render_json_res(req.state().ca_bgpsec_definitions_show(ca).await) }) } @@ -1956,7 +1939,7 @@ async fn api_ca_bgpsec_definitions_update( req: Request, ca: CaHandle, ) -> RoutingResult { - aa!(req, Permission::BGPSEC_UPDATE, Handle::from(&ca), { + aa!(req, Permission::BgpsecUpdate, ca, { let actor = req.actor(); let server = req.state().clone(); match req.json().await { @@ -2007,7 +1990,7 @@ async fn api_ca_history_commands( ) -> RoutingResult { match *req.method() { Method::GET => { - aa!(req, Permission::CA_READ, Handle::from(&handle), { + aa!(req, Permission::CaRead, handle, { // /api/v1/cas/{ca}/history/commands // //// let mut crit = CommandHistoryCriteria::default(); @@ -2059,7 +2042,7 @@ async fn api_ca_command_details( match path.path_arg() { Some(key) => match *req.method() { Method::GET => { - aa!(req, Permission::CA_READ, 
Handle::from(&ca), { + aa!(req, Permission::CaRead, ca, { match req.state().ca_command_details(&ca, key) { Ok(details) => render_json(details), Err(e) => match e { @@ -2081,8 +2064,8 @@ async fn api_ca_child_req_xml(req: Request, ca: CaHandle) -> RoutingResult { match *req.method() { Method::GET => aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, match ca_child_req(&req, &ca).await { Ok(child_request) => Ok(HttpResponse::xml(child_request.to_xml_vec())), @@ -2097,8 +2080,8 @@ async fn api_ca_child_req_json(req: Request, ca: CaHandle) -> RoutingResult { match *req.method() { Method::GET => aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, match ca_child_req(&req, &ca).await { Ok(req) => render_json(req), Err(e) => render_error(e), @@ -2122,8 +2105,8 @@ async fn api_ca_publisher_req_json( match *req.method() { Method::GET => aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res(req.state().ca_publisher_req(&ca).await) ), _ => render_unknown_method(), @@ -2137,8 +2120,8 @@ async fn api_ca_publisher_req_xml( match *req.method() { Method::GET => aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, match req.state().ca_publisher_req(&ca).await { Ok(publisher_request) => Ok(HttpResponse::xml(publisher_request.to_xml_vec())), @@ -2152,8 +2135,8 @@ async fn api_ca_publisher_req_xml( async fn api_ca_repo_details(req: Request, ca: CaHandle) -> RoutingResult { aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res(req.state().ca_repo_details(&ca).await) ) } @@ -2162,8 +2145,8 @@ async fn api_ca_repo_status(req: Request, ca: CaHandle) -> RoutingResult { match *req.method() { Method::GET => aa!( req, - Permission::CA_READ, - Handle::from(&ca), + Permission::CaRead, + ca, render_json_res( req.state() .ca_status(&ca) @@ -2210,7 +2193,7 @@ fn extract_repository_contact( } async fn api_ca_repo_update(req: 
Request, ca: CaHandle) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); let server = req.state().clone(); @@ -2232,7 +2215,7 @@ async fn api_ca_parent_add_or_update( ca: CaHandle, parent_override: Option, ) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); let server = req.state().clone(); @@ -2301,7 +2284,7 @@ async fn api_ca_remove_parent( ca: CaHandle, parent: ParentHandle, ) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); render_empty_res( req.state().ca_parent_remove(ca, parent, &actor).await, @@ -2311,7 +2294,7 @@ async fn api_ca_remove_parent( /// Force a key roll for a CA, i.e. use a max key age of 0 seconds. async fn api_ca_kr_init(req: Request, ca: CaHandle) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); render_empty_res(req.state().ca_keyroll_init(ca, &actor).await) }) @@ -2320,7 +2303,7 @@ async fn api_ca_kr_init(req: Request, ca: CaHandle) -> RoutingResult { /// Force key activation for all new keys, i.e. use a staging period of 0 /// seconds. 
async fn api_ca_kr_activate(req: Request, ca: CaHandle) -> RoutingResult { - aa!(req, Permission::CA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::CaUpdate, ca, { let actor = req.actor(); render_empty_res(req.state().ca_keyroll_activate(ca, &actor).await) }) @@ -2333,7 +2316,7 @@ async fn api_ca_aspas_definitions_show( req: Request, ca: CaHandle, ) -> RoutingResult { - aa!(req, Permission::ASPAS_READ, Handle::from(&ca), { + aa!(req, Permission::AspasRead, ca, { let state = req.state().clone(); render_json_res(state.ca_aspas_definitions_show(ca).await) }) @@ -2344,7 +2327,7 @@ async fn api_ca_aspas_definitions_update( req: Request, ca: CaHandle, ) -> RoutingResult { - aa!(req, Permission::ASPAS_UPDATE, Handle::from(&ca), { + aa!(req, Permission::AspasUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); @@ -2364,7 +2347,7 @@ async fn api_ca_aspas_update_aspa( ca: CaHandle, customer: Asn, ) -> RoutingResult { - aa!(req, Permission::ASPAS_UPDATE, Handle::from(&ca), { + aa!(req, Permission::AspasUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); @@ -2385,7 +2368,7 @@ async fn api_ca_aspas_delete( ca: CaHandle, customer: Asn, ) -> RoutingResult { - aa!(req, Permission::ASPAS_UPDATE, Handle::from(&ca), { + aa!(req, Permission::AspasUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); @@ -2398,7 +2381,7 @@ async fn api_ca_aspas_delete( /// Update the route authorizations for this CA async fn api_ca_routes_update(req: Request, ca: CaHandle) -> RoutingResult { - aa!(req, Permission::ROUTES_UPDATE, Handle::from(&ca), { + aa!(req, Permission::RoutesUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); @@ -2418,7 +2401,7 @@ async fn api_ca_routes_try_update( req: Request, ca: CaHandle, ) -> RoutingResult { - aa!(req, Permission::ROUTES_UPDATE, Handle::from(&ca), { + aa!(req, Permission::RoutesUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); @@ -2469,7 +2452,7 @@ async fn 
api_ca_routes_try_update( /// show the route authorizations for this CA async fn api_ca_routes_show(req: Request, ca: CaHandle) -> RoutingResult { - aa!(req, Permission::ROUTES_READ, Handle::from(&ca), { + aa!(req, Permission::RoutesRead, ca, { match req.state().ca_routes_show(&ca).await { Ok(roas) => render_json(roas), Err(_) => render_unknown_resource(), @@ -2483,7 +2466,7 @@ async fn api_ca_routes_analysis( path: &mut RequestPath, ca: CaHandle, ) -> RoutingResult { - aa!(req, Permission::ROUTES_ANALYSIS, Handle::from(&ca), { + aa!(req, Permission::RoutesAnalysis, ca, { match path.next() { Some("full") => { render_json_res(req.state().ca_routes_bgp_analysis(&ca).await) @@ -2526,7 +2509,7 @@ async fn api_ca_routes_analysis( async fn api_republish_all(req: Request, force: bool) -> RoutingResult { match *req.method() { - Method::POST => aa!(req, Permission::CA_ADMIN, { + Method::POST => aa!(req, Permission::CaAdmin, { render_empty_res(req.state().republish_all(force).await) }), _ => render_unknown_method(), @@ -2535,9 +2518,8 @@ async fn api_republish_all(req: Request, force: bool) -> RoutingResult { async fn api_resync_all(req: Request) -> RoutingResult { match *req.method() { - Method::POST => aa!(req, Permission::CA_ADMIN, { - let actor = req.actor(); - render_empty_res(req.state().cas_repo_sync_all(&actor)) + Method::POST => aa!(req, Permission::CaAdmin, { + render_empty_res(req.state().cas_repo_sync_all(req.auth_info())) }), _ => render_unknown_method(), } @@ -2546,7 +2528,7 @@ async fn api_resync_all(req: Request) -> RoutingResult { /// Refresh all CAs async fn api_refresh_all(req: Request) -> RoutingResult { match *req.method() { - Method::POST => aa!(req, Permission::CA_ADMIN, { + Method::POST => aa!(req, Permission::CaAdmin, { render_empty_res(req.state().cas_refresh_all().await) }), _ => render_unknown_method(), @@ -2556,7 +2538,7 @@ async fn api_refresh_all(req: Request) -> RoutingResult { /// Schedule check suspend for all CAs async fn 
api_suspend_all(req: Request) -> RoutingResult { match *req.method() { - Method::POST => aa!(req, Permission::CA_ADMIN, { + Method::POST => aa!(req, Permission::CaAdmin, { render_empty_res(req.state().cas_schedule_suspend_all()) }), _ => render_unknown_method(), @@ -2636,8 +2618,8 @@ async fn api_ca_rta( async fn api_ca_rta_list(req: Request, ca: CaHandle) -> RoutingResult { aa!( req, - Permission::RTA_LIST, - Handle::from(&ca), + Permission::RtaList, + ca, render_json_res(req.state().rta_list(ca).await) ) } @@ -2649,8 +2631,8 @@ async fn api_ca_rta_show( ) -> RoutingResult { aa!( req, - Permission::RTA_READ, - Handle::from(&ca), + Permission::RtaRead, + ca, render_json_res(req.state().rta_show(ca, name).await) ) } @@ -2660,7 +2642,7 @@ async fn api_ca_rta_sign( ca: CaHandle, name: RtaName, ) -> RoutingResult { - aa!(req, Permission::RTA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::RtaUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); match req.json().await { @@ -2677,7 +2659,7 @@ async fn api_ca_rta_multi_prep( ca: CaHandle, name: RtaName, ) -> RoutingResult { - aa!(req, Permission::RTA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::RtaUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); @@ -2695,7 +2677,7 @@ async fn api_ca_rta_multi_sign( ca: CaHandle, name: RtaName, ) -> RoutingResult { - aa!(req, Permission::RTA_UPDATE, Handle::from(&ca), { + aa!(req, Permission::RtaUpdate, ca, { let actor = req.actor(); let state = req.state().clone(); match req.json().await { @@ -2766,7 +2748,7 @@ async fn api_ta(req: Request, path: &mut RequestPath) -> RoutingResult { Method::POST => { let ta_handle = ta::ta_handle(); let server = req.state().clone(); - let actor = req.actor.clone(); + let actor = req.actor(); match req.api_bytes().await.map(|bytes| { extract_repository_contact(&ta_handle, bytes) @@ -2792,7 +2774,7 @@ async fn api_ta(req: Request, path: &mut RequestPath) -> RoutingResult { Some("signer") => match 
path.next() { Some("add") => { let server = req.state().clone(); - let actor = req.actor.clone(); + let actor = req.actor(); match req.json().await { Ok(ta_signer_info) => render_empty_res( server @@ -2816,7 +2798,7 @@ async fn api_ta(req: Request, path: &mut RequestPath) -> RoutingResult { Some("response") => match *req.method() { Method::POST => { let server = req.state().clone(); - let actor = req.actor.clone(); + let actor = req.actor(); match req.json().await { Ok(response) => render_empty_res( diff --git a/src/daemon/http/testbed.rs b/src/daemon/http/testbed.rs index 0f7f390d1..17eae300b 100644 --- a/src/daemon/http/testbed.rs +++ b/src/daemon/http/testbed.rs @@ -3,8 +3,8 @@ use hyper::Method; use rpki::ca::idexchange::PublisherHandle; use crate::{ - constants::ACTOR_DEF_TESTBED, daemon::{ + auth::AuthInfo, ca::testbed_ca_handle, http::{ server::{ @@ -55,7 +55,7 @@ pub async fn testbed(mut req: Request) -> RoutingResult { // Krill CAs and publishers. Upgrade anonymous users with testbed // rights ready for the next call in the chain to the testbed() // API call handler functions. - req.upgrade_from_anonymous(ACTOR_DEF_TESTBED).await; + req.upgrade_from_anonymous(AuthInfo::testbed()).await; let mut path = req.path().clone(); match path.next() { diff --git a/src/daemon/krillserver.rs b/src/daemon/krillserver.rs index 9d6c13191..ffaed10f2 100644 --- a/src/daemon/krillserver.rs +++ b/src/daemon/krillserver.rs @@ -1,9 +1,10 @@ //! An RPKI publication protocol server. 
-use std::{collections::HashMap, path::PathBuf, str::FromStr, sync::Arc}; - +use std::collections::HashMap; +use std::path::PathBuf; +use std::str::FromStr; +use std::sync::Arc; use bytes::Bytes; use chrono::Duration; - use futures_util::future::try_join_all; use rpki::{ @@ -15,9 +16,10 @@ use rpki::{ uri, }; +use crate::daemon::auth::AuthInfo; use crate::{ commons::{ - actor::{Actor, ActorDef}, + actor::Actor, api::{ self, import::{ExportChild, ImportChild}, @@ -40,12 +42,12 @@ use crate::{ }, constants::*, daemon::{ - auth::{providers::AdminTokenAuthProvider, Authorizer, LoggedInUser}, + auth::{Authorizer, LoggedInUser}, ca::{ self, testbed_ca_handle, CaManager, CaStatus, ResourceTaggedAttestation, RtaContentRequest, RtaPrepareRequest, }, - config::{AuthType, Config}, + config::Config, http::{HttpResponse, HyperRequest}, mq::{now, Task, TaskQueue}, scheduler::Scheduler, @@ -57,12 +59,6 @@ use crate::{ }, }; -#[cfg(feature = "multi-user")] -use crate::daemon::auth::{ - common::session::LoginSessionCache, - providers::{ConfigFileAuthProvider, OpenIDConnectAuthProvider}, -}; - //------------ KrillServer --------------------------------------------------- /// This is the Krill server that is doing all the orchestration for all @@ -72,7 +68,7 @@ pub struct KrillServer { service_uri: uri::Https, // Component responsible for API authorization checks - authorizer: Authorizer, + authorizer: Arc, // Publication server, with configured publishers repo_manager: Arc, @@ -89,10 +85,6 @@ pub struct KrillServer { // Time this server was started started: Timestamp, - #[cfg(feature = "multi-user")] - // Global login session cache - login_session_cache: Arc, - // System actor system_actor: Actor, @@ -125,40 +117,8 @@ impl KrillServer { .build()?; let signer = Arc::new(signer); - #[cfg(feature = "multi-user")] - let login_session_cache = Arc::new(LoginSessionCache::new()); - - // Construct the authorizer used to verify API access requests and to - // tell Lagosta where to send 
end-users to login and logout. - // TODO: remove the ugly duplication, however attempts to do so have - // so far failed due to incompatible match arm types, or - // unknown size of dyn AuthProvider, or concrete type needs to - // be known in async fn, etc. - let authorizer = match config.auth_type { - AuthType::AdminToken => Authorizer::new( - config.clone(), - AdminTokenAuthProvider::new(config.clone()).into(), - )?, - #[cfg(feature = "multi-user")] - AuthType::ConfigFile => Authorizer::new( - config.clone(), - ConfigFileAuthProvider::new( - config.clone(), - login_session_cache.clone(), - )? - .into(), - )?, - #[cfg(feature = "multi-user")] - AuthType::OpenIDConnect => Authorizer::new( - config.clone(), - OpenIDConnectAuthProvider::new( - config.clone(), - login_session_cache.clone(), - )? - .into(), - )?, - }; - let system_actor = authorizer.actor_from_def(ACTOR_DEF_KRILL); + let authorizer = Authorizer::new(config.clone())?.into(); + let system_actor = ACTOR_DEF_KRILL; // Task queue Arc is shared between ca_manager, repo_manager and the // scheduler. 
@@ -204,8 +164,6 @@ impl KrillServer { bgp_analyser, mq, started: Timestamp::now(), - #[cfg(feature = "multi-user")] - login_session_cache, system_actor, config: config.clone(), }; @@ -316,7 +274,7 @@ impl KrillServer { self.repo_manager.clone(), self.bgp_analyser.clone(), #[cfg(feature = "multi-user")] - self.login_session_cache.clone(), + self.authorizer.clone(), self.config.clone(), self.system_actor.clone(), ) @@ -337,12 +295,10 @@ impl KrillServer { &self.system_actor } - pub async fn actor_from_request(&self, request: &HyperRequest) -> Actor { - self.authorizer.actor_from_request(request).await - } - - pub fn actor_from_def(&self, actor_def: ActorDef) -> Actor { - self.authorizer.actor_from_def(actor_def) + pub async fn authenticate_request( + &self, request: &HyperRequest + ) -> AuthInfo { + self.authorizer.authenticate_request(request).await } pub async fn get_login_url(&self) -> KrillResult { @@ -369,7 +325,7 @@ impl KrillServer { #[cfg(feature = "multi-user")] pub fn login_session_cache_size(&self) -> usize { - self.login_session_cache.size() + self.authorizer.login_session_cache_size() } } @@ -708,9 +664,9 @@ impl KrillServer { ) -> KrillResult> { let mut res = HashMap::new(); - for ca in self.ca_list(&self.system_actor)?.cas() { + for handle in self.ca_manager.ca_handles()? { // can't fail really, but to be sure - if let Ok(ca) = self.ca_manager.get_ca(ca.handle()).await { + if let Ok(ca) = self.ca_manager.get_ca(&handle).await { let roas = ca.configured_roas(); let roa_count = roas.len(); let child_count = ca.children().count(); @@ -748,10 +704,10 @@ impl KrillServer { // We need to know which CAs already exist. They should not be // imported again, but can serve as parents. let mut existing_cas = HashMap::new(); - for ca in self.ca_list(&actor)?.cas() { - let parent_handle = ca.handle().convert(); + for handle in self.ca_manager.ca_handles()? 
{ + let parent_handle = handle.convert(); let resources = - self.ca_manager.get_ca(ca.handle()).await?.all_resources(); + self.ca_manager.get_ca(&handle).await?.all_resources(); existing_cas.insert(parent_handle, resources); } structure.validate_ca_hierarchy(existing_cas)?; @@ -975,10 +931,10 @@ impl KrillServer { pub async fn all_ca_issues( &self, - actor: &Actor, + auth: &AuthInfo, ) -> KrillResult { let mut all_issues = AllCertAuthIssues::default(); - for ca in self.ca_list(actor)?.cas() { + for ca in self.ca_list(auth)?.cas() { let issues = self.ca_issues(ca.handle()).await?; if !issues.is_empty() { all_issues.add(ca.handle().clone(), issues); @@ -1023,8 +979,8 @@ impl KrillServer { } /// Re-sync all CAs with their repositories - pub fn cas_repo_sync_all(&self, actor: &Actor) -> KrillEmptyResult { - self.ca_manager.cas_schedule_repo_sync_all(actor) + pub fn cas_repo_sync_all(&self, auth: &AuthInfo) -> KrillEmptyResult { + self.ca_manager.cas_schedule_repo_sync_all(auth) } /// Re-sync a specific CA with its repository @@ -1053,8 +1009,8 @@ impl KrillServer { /// # Admin CAS impl KrillServer { - pub fn ca_list(&self, actor: &Actor) -> KrillResult { - self.ca_manager.ca_list(actor) + pub fn ca_list(&self, auth: &AuthInfo) -> KrillResult { + self.ca_manager.ca_list(auth) } /// Returns the public CA info for a CA, or NONE if the CA cannot be diff --git a/src/daemon/properties/mod.rs b/src/daemon/properties/mod.rs index e6b07b3be..945c5937e 100644 --- a/src/daemon/properties/mod.rs +++ b/src/daemon/properties/mod.rs @@ -32,7 +32,7 @@ use crate::{ util::KrillVersion, KrillResult, }, - constants::{PROPERTIES_DFLT_NAME, PROPERTIES_NS}, + constants::{ACTOR_DEF_KRILL, PROPERTIES_DFLT_NAME, PROPERTIES_NS}, }; //------------ PropertiesInitCommand --------------------------------------- @@ -282,7 +282,7 @@ impl PropertiesManager { .map(|store| PropertiesManager { store, main_key, - system_actor: Actor::system_actor(), + system_actor: ACTOR_DEF_KRILL, }) 
.map_err(Error::AggregateStoreError) } diff --git a/src/daemon/scheduler.rs b/src/daemon/scheduler.rs index f211c0545..2a8d1756a 100644 --- a/src/daemon/scheduler.rs +++ b/src/daemon/scheduler.rs @@ -40,7 +40,7 @@ use crate::{ }; #[cfg(feature = "multi-user")] -use crate::daemon::auth::common::session::LoginSessionCache; +use crate::daemon::auth::Authorizer; use super::mq::TaskResult; @@ -51,7 +51,7 @@ pub struct Scheduler { bgp_analyser: Arc, #[cfg(feature = "multi-user")] // Responsible for purging expired cached login tokens - login_session_cache: Arc, + authorizer: Arc, config: Arc, system_actor: Actor, started: Timestamp, @@ -63,9 +63,7 @@ impl Scheduler { ca_manager: Arc, repo_manager: Arc, bgp_analyser: Arc, - #[cfg(feature = "multi-user")] login_session_cache: Arc< - LoginSessionCache, - >, + #[cfg(feature = "multi-user")] authorizer: Arc, config: Arc, system_actor: Actor, ) -> Self { @@ -75,7 +73,7 @@ impl Scheduler { repo_manager, bgp_analyser, #[cfg(feature = "multi-user")] - login_session_cache, + authorizer, config, system_actor, started: Timestamp::now(), @@ -237,11 +235,7 @@ impl Scheduler { // to avoid a thundering herd. Note that the operator can always // choose to run bulk operations manually if they know that they // cannot wait. 
- let ca_list = self - .ca_manager - .ca_list(&self.system_actor) - .map_err(FatalError)?; - let cas = ca_list.cas(); + let cas = self.ca_manager.ca_handles().map_err(FatalError)?; debug!("Adding missing tasks at start up"); // If we have many CAs then we need to apply some jitter @@ -250,10 +244,10 @@ impl Scheduler { let use_jitter = cas.len() >= SCHEDULER_USE_JITTER_CAS_THRESHOLD; - for summary in cas { + for handle in &cas { let ca = self .ca_manager - .get_ca(summary.handle()) + .get_ca(handle) .await .map_err(FatalError)?; let ca_handle = ca.handle(); @@ -566,7 +560,7 @@ impl Scheduler { #[cfg(feature = "multi-user")] fn sweep_login_cache(&self) -> Result { - if let Err(e) = self.login_session_cache.sweep() { + if let Err(e) = self.authorizer.sweep() { error!( "Background sweep of session decryption cache failed: {}", e diff --git a/src/pubd/manager.rs b/src/pubd/manager.rs index a5b05f096..71513ba55 100644 --- a/src/pubd/manager.rs +++ b/src/pubd/manager.rs @@ -471,7 +471,7 @@ mod tests { let publisher_req = make_publisher_req(alice_handle.as_str(), alice.id_cert()); - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); + let actor = ACTOR_DEF_TEST; server.create_publisher(publisher_req, &actor).unwrap(); let alice_found = @@ -493,7 +493,7 @@ mod tests { let publisher_req = make_publisher_req(alice_handle.as_str(), alice.id_cert()); - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); + let actor = ACTOR_DEF_TEST; server .create_publisher(publisher_req.clone(), &actor) .unwrap(); @@ -517,7 +517,7 @@ mod tests { let publisher_req = make_publisher_req(alice_handle.as_str(), alice.id_cert()); - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); + let actor = ACTOR_DEF_TEST; server.create_publisher(publisher_req, &actor).unwrap(); let list_reply = server.list(&alice_handle).unwrap(); @@ -544,7 +544,7 @@ mod tests { let publisher_req = make_publisher_req(alice_handle.as_str(), alice.id_cert()); - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); + let actor = 
ACTOR_DEF_TEST; server.create_publisher(publisher_req, &actor).unwrap(); // get the file out of a list_reply @@ -780,7 +780,7 @@ mod tests { let publisher_req = make_publisher_req(alice_handle.as_str(), alice.id_cert()); - let actor = Actor::actor_from_def(ACTOR_DEF_TEST); + let actor = ACTOR_DEF_TEST; server.create_publisher(publisher_req, &actor).unwrap(); // get the file out of a list_reply diff --git a/src/pubd/repository.rs b/src/pubd/repository.rs index 010fa20ea..8aba46a98 100644 --- a/src/pubd/repository.rs +++ b/src/pubd/repository.rs @@ -43,7 +43,7 @@ use crate::{ KrillResult, }, constants::{ - PUBSERVER_CONTENT_NS, PUBSERVER_DFLT, PUBSERVER_NS, + ACTOR_DEF_KRILL, PUBSERVER_CONTENT_NS, PUBSERVER_DFLT, PUBSERVER_NS, REPOSITORY_RRDP_ARCHIVE_DIR, REPOSITORY_RRDP_DIR, REPOSITORY_RSYNC_DIR, RRDP_FIRST_SERIAL, }, @@ -1882,7 +1882,7 @@ impl RepositoryAccessProxy { if self.initialized()? { Err(Error::RepositoryServerAlreadyInitialized) } else { - let actor = Actor::system_actor(); + let actor = ACTOR_DEF_KRILL; let (rrdp_base_uri, rsync_jail) = uris.unpack(); diff --git a/src/ta/mod.rs b/src/ta/mod.rs index e1010ad04..12148ce94 100644 --- a/src/ta/mod.rs +++ b/src/ta/mod.rs @@ -84,9 +84,7 @@ mod tests { let timing = TaTimingConfig::default(); - let actor = crate::commons::actor::Actor::actor_from_def( - crate::constants::ACTOR_DEF_KRILL, - ); + let actor = crate::constants::ACTOR_DEF_KRILL; let proxy_handle = TrustAnchorHandle::new("proxy".into()); let proxy_init = TrustAnchorProxyInitCommand::make( diff --git a/src/upgrades/mod.rs b/src/upgrades/mod.rs index 767246754..d3afb64d0 100644 --- a/src/upgrades/mod.rs +++ b/src/upgrades/mod.rs @@ -15,7 +15,6 @@ use rpki::{ use crate::{ commons::{ - actor::Actor, api::{ AspaDefinition, AspaDefinitionUpdates, CustomerAsn, ProviderAsn, }, @@ -30,7 +29,7 @@ use crate::{ KrillResult, }, constants::{ - CASERVER_NS, CA_OBJECTS_NS, KEYS_NS, KRILL_VERSION, + ACTOR_DEF_KRILL, CASERVER_NS, CA_OBJECTS_NS, KEYS_NS, KRILL_VERSION, 
PUBSERVER_CONTENT_NS, PUBSERVER_NS, SIGNERS_NS, STATUS_NS, TA_PROXY_SERVER_NS, TA_SIGNER_SERVER_NS, }, @@ -434,7 +433,7 @@ pub trait UpgradeAggregateStorePre0_14 { // From 0.14.x and up we will have command '0' for the init, // where beforehand we only had an event. We // will have to make up some values for the actor and time. - let actor = Actor::system_actor().to_string(); + let actor = ACTOR_DEF_KRILL; // The time is tricky.. our best guess is to set this to the // same value as the first command, if there @@ -454,7 +453,7 @@ pub trait UpgradeAggregateStorePre0_14 { let command = self.convert_init_event( old_init, handle.clone(), - actor, + actor.audit_name(), time, )?; diff --git a/tests/common/mod.rs b/tests/common/mod.rs index e92ac5acd..aac78bcdb 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -128,13 +128,10 @@ impl TestConfig { let auth_type = AuthType::AdminToken; let admin_token = Token::from("secret"); #[cfg(feature = "multi-user")] - let auth_policies = vec![]; - #[cfg(feature = "multi-user")] - let auth_private_attributes = vec![]; - #[cfg(feature = "multi-user")] let auth_users = None; #[cfg(feature = "multi-user")] let auth_openidconnect = None; + let auth_roles = ConfigDefaults::auth_roles(); let default_signer = SignerReference::default(); let one_off_signer = SignerReference::default(); @@ -266,13 +263,10 @@ impl TestConfig { admin_token, auth_type, #[cfg(feature = "multi-user")] - auth_policies, - #[cfg(feature = "multi-user")] - auth_private_attributes, - #[cfg(feature = "multi-user")] auth_users, #[cfg(feature = "multi-user")] auth_openidconnect, + auth_roles, default_signer, one_off_signer, signers,